# HG changeset patch
# User Doug Simon
# Date 1355344600 -3600
# Node ID 5d0bb7d5278388804e51a125ca73e29897b692ce
# Parent c421c19b7bf835a85900a92e1e2bdce7e259931c
changes to support Graal co-existing with the other HotSpot compiler(s) and being used for explicit compilation requests and code installation via the Graal API

diff -r c421c19b7bf8 -r 5d0bb7d52783 README_GRAAL.txt
--- a/README_GRAAL.txt Wed Dec 12 15:46:11 2012 +0100
+++ b/README_GRAAL.txt Wed Dec 12 21:36:40 2012 +0100
@@ -9,7 +9,7 @@
 Building both the Java and C++ source code comprising the Graal VM
 can be done with the following simple command.
 
- mx build
+% mx build
 
 This builds the 'product' version of HotSpot with the Graal modifications.
 To build the debug or fastdebug versions:
@@ -22,16 +22,49 @@
 
 To run the VM, use 'mx vm' in place of the standard 'java' command:
 
- mx vm ...
+% mx vm ...
 
 To select the fastdebug or debug versions of the VM:
 
- mx --fastdebug vm ...
- mx --debug vm ...
+% mx --fastdebug vm ...
+% mx --debug vm ...
 
 Graal has an optional bootstrap step where it compiles itself before
 compiling any application code. This bootstrap step currently takes about
 7 seconds on a fast x64 machine. It's useful to disable this bootstrap step
 when running small programs with the -XX:-BootstrapGraal options. For example:
 
- mx vm -XX:-BootstrapGraal ...
+% mx vm -XX:-BootstrapGraal ...
+
+
+Other Build Configurations
+--------------------------
+
+By default, the build commands above create a HotSpot binary where Graal
+is the only compiler. This binary is the Graal VM binary and identifies
+itself as such with the -version option:
+
+% mx vm -XX:-BootstrapGraal -version
+java version "1.7.0_07"
+Java(TM) SE Runtime Environment (build 1.7.0_07-b10)
+OpenJDK 64-Bit Graal VM (build 25.0-b10-internal, mixed mode)
+
+It's also possible to build and execute the standard HotSpot binaries
+using the --vm option:
+
+% mx --vm server build
+% mx --vm server vm -version
+java version "1.7.0_07"
+Java(TM) SE Runtime Environment (build 1.7.0_07-b10)
+OpenJDK 64-Bit Server VM (build 25.0-b10-internal, mixed mode)
+
+These standard binaries still include the code necessary to support use of the
+Graal compiler for explicit compilation requests. However, in this configuration
+the Graal compiler will not service VM-issued compilation requests (e.g., upon
+counter overflow in the interpreter).
+
+To build a HotSpot binary that completely omits all VM support for Graal,
+define an environment variable OMIT_GRAAL (its value does not matter) and build
+with the --vm option as above (doing a clean first if necessary):
+
+% env OMIT_GRAAL= mx --vm server build
diff -r c421c19b7bf8 -r 5d0bb7d52783 graal/com.oracle.graal.compiler.test/src/com/oracle/graal/compiler/test/GraalCompilerTest.java
--- a/graal/com.oracle.graal.compiler.test/src/com/oracle/graal/compiler/test/GraalCompilerTest.java Wed Dec 12 15:46:11 2012 +0100
+++ b/graal/com.oracle.graal.compiler.test/src/com/oracle/graal/compiler/test/GraalCompilerTest.java Wed Dec 12 21:36:40 2012 +0100
@@ -298,16 +298,29 @@
             }
         }
 
-        InstalledCode installedCode = Debug.scope("Compiling", new DebugDumpScope(String.valueOf(compilationId++), true), new Callable() {
+
+        final int id = compilationId++;
+
+        InstalledCode installedCode = Debug.scope("Compiling", new DebugDumpScope(String.valueOf(id), true), new Callable() {
 
             public InstalledCode call() throws Exception {
+                final boolean printCompilation = GraalOptions.PrintCompilation && !TTY.isSuppressed();
+                if (printCompilation) {
+                    TTY.println(String.format("@%-6d Graal %-70s %-45s %-50s ...", id, method.getDeclaringClass().getName(), method.getName(), method.getSignature()));
+                }
+                long start = System.currentTimeMillis();
                 PhasePlan phasePlan = new PhasePlan();
                 GraphBuilderPhase graphBuilderPhase = new GraphBuilderPhase(runtime, GraphBuilderConfiguration.getDefault(), OptimisticOptimizations.ALL);
                 phasePlan.addPhase(PhasePosition.AFTER_PARSING, graphBuilderPhase);
                 editPhasePlan(method, graph, phasePlan);
                 CompilationResult compResult = graalCompiler.compileMethod(method, graph, null, phasePlan, OptimisticOptimizations.ALL);
+                if (printCompilation) {
+                    TTY.println(String.format("@%-6d Graal %-70s %-45s %-50s | %4dms %5dB", id, "", "", "", System.currentTimeMillis() - start, compResult.getTargetCodeSize()));
+                }
                 return addMethod(method, compResult);
             }
         });
+
+        cache.put(method, installedCode);
         return installedCode;
     }
diff -r c421c19b7bf8 -r 5d0bb7d52783 graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotBackend.java
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotBackend.java Wed Dec 12 15:46:11 2012 +0100
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotBackend.java Wed Dec 12 21:36:40 2012 +0100
@@ -25,6 +25,7 @@
 import static com.oracle.graal.amd64.AMD64.*;
 import static com.oracle.graal.api.code.CallingConvention.Type.*;
 import static com.oracle.graal.api.code.ValueUtil.*;
+import static com.oracle.graal.phases.GraalOptions.*;
 
 import java.lang.reflect.*;
 
@@ -212,13 +213,16 @@
             // detects this case - see the definition of frame::should_be_deoptimized()
 
             Register scratch = regConfig.getScratchRegister();
+            int offset = SafepointPollOffset % target.pageSize;
             if (config.isPollingPageFar) {
-                asm.movq(scratch, config.safepointPollingAddress);
+                asm.movq(scratch, config.safepointPollingAddress + offset);
                 tasm.recordMark(Marks.MARK_POLL_RETURN_FAR);
                 asm.movq(scratch, new Address(tasm.target.wordKind, scratch.asValue()));
             } else {
                 tasm.recordMark(Marks.MARK_POLL_RETURN_NEAR);
-                asm.movq(scratch, new Address(tasm.target.wordKind, rip.asValue()));
+                // The C++ code transforms the polling page offset into an RIP displacement
+                // to the real address at that offset in the polling page.
+ asm.movq(scratch, new Address(tasm.target.wordKind, rip.asValue(), offset)); } } } diff -r c421c19b7bf8 -r 5d0bb7d52783 graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64SafepointOp.java --- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64SafepointOp.java Wed Dec 12 15:46:11 2012 +0100 +++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64SafepointOp.java Wed Dec 12 21:36:40 2012 +0100 @@ -23,6 +23,7 @@ package com.oracle.graal.hotspot.amd64; import static com.oracle.graal.amd64.AMD64.*; +import static com.oracle.graal.phases.GraalOptions.*; import com.oracle.graal.api.code.*; import com.oracle.graal.asm.amd64.*; @@ -51,15 +52,18 @@ public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler asm) { Register scratch = tasm.frameMap.registerConfig.getScratchRegister(); int pos = asm.codeBuffer.position(); + int offset = SafepointPollOffset % tasm.target.pageSize; if (config.isPollingPageFar) { - asm.movq(scratch, config.safepointPollingAddress); + asm.movq(scratch, config.safepointPollingAddress + offset); tasm.recordMark(Marks.MARK_POLL_FAR); tasm.recordSafepoint(pos, state); asm.movq(scratch, new Address(tasm.target.wordKind, scratch.asValue())); } else { tasm.recordMark(Marks.MARK_POLL_NEAR); tasm.recordSafepoint(pos, state); - asm.movq(scratch, new Address(tasm.target.wordKind, rip.asValue())); + // The C++ code transforms the polling page offset into an RIP displacement + // to the real address at that offset in the polling page. + asm.movq(scratch, new Address(tasm.target.wordKind, rip.asValue(), offset)); } } } diff -r c421c19b7bf8 -r 5d0bb7d52783 graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/HotSpotVMConfig.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/HotSpotVMConfig.java Wed Dec 12 15:46:11 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/HotSpotVMConfig.java Wed Dec 12 21:36:40 2012 +0100 @@ -38,8 +38,6 @@ public int codeEntryAlignment; public boolean verifyOops; public boolean useFastLocking; - public boolean useFastNewObjectArray; - public boolean useFastNewTypeArray; public boolean useTLAB; public boolean useBiasedLocking; @@ -289,6 +287,26 @@ public long logObjectStub; public long logPrintfStub; + public int deoptReasonNone; + public int deoptReasonNullCheck; + public int deoptReasonRangeCheck; + public int deoptReasonClassCheck; + public int deoptReasonArrayCheck; + public int deoptReasonUnreached0; + public int deoptReasonTypeCheckInlining; + public int deoptReasonOptimizedTypeCheck; + public int deoptReasonNotCompiledExceptionHandler; + public int deoptReasonUnresolved; + public int deoptReasonJsrMismatch; + public int deoptReasonDiv0Check; + public int deoptReasonConstraint; + + public int deoptActionNone; + public int deoptActionMaybeRecompile; + public int deoptActionReinterpret; + public int deoptActionMakeNotEntrant; + public int deoptActionMakeNotCompilable; + public void check() { assert vmPageSize >= 16; assert codeEntryAlignment > 0; diff -r c421c19b7bf8 -r 5d0bb7d52783 graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotMethodData.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotMethodData.java Wed Dec 12 15:46:11 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotMethodData.java Wed Dec 12 21:36:40 2012 +0100 @@ -164,6 +164,9 @@ } private abstract static class AbstractMethodData implements 
HotSpotMethodDataAccessor { + /** + * Corresponds to DS_RECOMPILE_BIT defined in deoptimization.cpp. + */ private static final int EXCEPTIONS_MASK = 0x80; private final int tag; @@ -323,9 +326,10 @@ private abstract static class AbstractTypeData extends CounterData { private static final int RECEIVER_TYPE_DATA_ROW_SIZE = cellsToBytes(2); - private static final int RECEIVER_TYPE_DATA_SIZE = cellIndexToOffset(1) + RECEIVER_TYPE_DATA_ROW_SIZE * config.typeProfileWidth; - private static final int RECEIVER_TYPE_DATA_FIRST_RECEIVER_OFFSET = cellIndexToOffset(1); - private static final int RECEIVER_TYPE_DATA_FIRST_COUNT_OFFSET = cellIndexToOffset(2); + private static final int RECEIVER_TYPE_DATA_SIZE = cellIndexToOffset(2) + RECEIVER_TYPE_DATA_ROW_SIZE * config.typeProfileWidth; + protected static final int NONPROFILED_RECEIVER_COUNT_OFFSET = cellIndexToOffset(1); + private static final int RECEIVER_TYPE_DATA_FIRST_RECEIVER_OFFSET = cellIndexToOffset(2); + private static final int RECEIVER_TYPE_DATA_FIRST_COUNT_OFFSET = cellIndexToOffset(3); protected AbstractTypeData(int tag) { super(tag, RECEIVER_TYPE_DATA_SIZE); @@ -356,11 +360,7 @@ return createTypeProfile(types, counts, totalCount, entries); } - protected long getTypesNotRecordedExecutionCount(HotSpotMethodData data, int position) { - // checkcast/aastore/instanceof profiling in the HotSpot template-based interpreter was adjusted so that the counter - // is incremented to indicate the polymorphic case instead of decrementing it for failed type checks - return getCounterValue(data, position); - } + protected abstract long getTypesNotRecordedExecutionCount(HotSpotMethodData data, int position); private static JavaTypeProfile createTypeProfile(ResolvedJavaType[] types, long[] counts, long totalCount, int entries) { if (entries <= 0 || totalCount < GraalOptions.MatureExecutionsTypeProfile) { @@ -402,6 +402,11 @@ public int getExecutionCount(HotSpotMethodData data, int position) { return -1; } + + @Override + protected long getTypesNotRecordedExecutionCount(HotSpotMethodData data, int position) { + return data.readUnsignedIntAsSignedInt(position, NONPROFILED_RECEIVER_COUNT_OFFSET); + } } private static class VirtualCallData extends AbstractTypeData { @@ -423,6 +428,11 @@ total += getCounterValue(data, position); return truncateLongToInt(total); } + + @Override + protected long getTypesNotRecordedExecutionCount(HotSpotMethodData data, int position) { + return getCounterValue(data, position); + } } private static class RetData extends CounterData { diff -r c421c19b7bf8 -r 5d0bb7d52783 graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotRuntime.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotRuntime.java Wed Dec 12 15:46:11 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotRuntime.java Wed Dec 12 21:36:40 2012 +0100 @@ -839,33 +839,31 @@ } public int convertDeoptAction(DeoptimizationAction action) { - // This must be kept in sync with the DeoptAction enum defined in deoptimization.hpp switch(action) { - case None: return 0; - case RecompileIfTooManyDeopts: return 1; - case InvalidateReprofile: return 2; - case InvalidateRecompile: return 3; - case InvalidateStopCompiling: return 4; + case None: return config.deoptActionNone; + case RecompileIfTooManyDeopts: return config.deoptActionMaybeRecompile; + case InvalidateReprofile: return config.deoptActionReinterpret; + case InvalidateRecompile: return config.deoptActionMakeNotEntrant; + case 
InvalidateStopCompiling: return config.deoptActionMakeNotCompilable; default: throw GraalInternalError.shouldNotReachHere(); } } public int convertDeoptReason(DeoptimizationReason reason) { - // This must be kept in sync with the DeoptReason enum defined in deoptimization.hpp switch(reason) { - case None: return 0; - case NullCheckException: return 1; - case BoundsCheckException: return 2; - case ClassCastException: return 3; - case ArrayStoreException: return 4; - case UnreachedCode: return 5; - case TypeCheckedInliningViolated: return 6; - case OptimizedTypeCheckViolated: return 7; - case NotCompiledExceptionHandler: return 8; - case Unresolved: return 9; - case JavaSubroutineMismatch: return 10; - case ArithmeticException: return 11; - case RuntimeConstraint: return 12; + case None: return config.deoptReasonNone; + case NullCheckException: return config.deoptReasonNullCheck; + case BoundsCheckException: return config.deoptReasonRangeCheck; + case ClassCastException: return config.deoptReasonClassCheck; + case ArrayStoreException: return config.deoptReasonArrayCheck; + case UnreachedCode: return config.deoptReasonUnreached0; + case TypeCheckedInliningViolated: return config.deoptReasonTypeCheckInlining; + case OptimizedTypeCheckViolated: return config.deoptReasonOptimizedTypeCheck; + case NotCompiledExceptionHandler: return config.deoptReasonNotCompiledExceptionHandler; + case Unresolved: return config.deoptReasonUnresolved; + case JavaSubroutineMismatch: return config.deoptReasonJsrMismatch; + case ArithmeticException: return config.deoptReasonDiv0Check; + case RuntimeConstraint: return config.deoptReasonConstraint; default: throw GraalInternalError.shouldNotReachHere(); } } diff -r c421c19b7bf8 -r 5d0bb7d52783 graal/com.oracle.graal.phases/src/com/oracle/graal/phases/GraalOptions.java --- a/graal/com.oracle.graal.phases/src/com/oracle/graal/phases/GraalOptions.java Wed Dec 12 15:46:11 2012 +0100 +++ b/graal/com.oracle.graal.phases/src/com/oracle/graal/phases/GraalOptions.java Wed Dec 12 21:36:40 2012 +0100 @@ -171,6 +171,7 @@ public static boolean AlignCallsForPatching = true; public static boolean ResolveClassBeforeStaticInvoke = true; public static boolean CanOmitFrame = true; + public static int SafepointPollOffset = 256; // Translating tableswitch instructions public static int MinimumJumpTableSize = 5; diff -r c421c19b7bf8 -r 5d0bb7d52783 make/bsd/makefiles/compiler1.make --- a/make/bsd/makefiles/compiler1.make Wed Dec 12 15:46:11 2012 +0100 +++ b/make/bsd/makefiles/compiler1.make Wed Dec 12 21:36:40 2012 +0100 @@ -29,3 +29,7 @@ VM_SUBDIR = client CFLAGS += -DCOMPILER1 + +ifndef OMIT_GRAAL + CFLAGS += -DGRAAL +endif \ No newline at end of file diff -r c421c19b7bf8 -r 5d0bb7d52783 make/bsd/makefiles/compiler2.make --- a/make/bsd/makefiles/compiler2.make Wed Dec 12 15:46:11 2012 +0100 +++ b/make/bsd/makefiles/compiler2.make Wed Dec 12 21:36:40 2012 +0100 @@ -29,3 +29,7 @@ VM_SUBDIR = server CFLAGS += -DCOMPILER2 + +ifndef OMIT_GRAAL + CFLAGS += -DGRAAL +endif \ No newline at end of file diff -r c421c19b7bf8 -r 5d0bb7d52783 make/bsd/makefiles/tiered.make --- a/make/bsd/makefiles/tiered.make Wed Dec 12 15:46:11 2012 +0100 +++ b/make/bsd/makefiles/tiered.make Wed Dec 12 21:36:40 2012 +0100 @@ -29,3 +29,7 @@ VM_SUBDIR = server CFLAGS += -DCOMPILER2 -DCOMPILER1 + +ifndef OMIT_GRAAL + CFLAGS += -DGRAAL +endif diff -r c421c19b7bf8 -r 5d0bb7d52783 make/bsd/makefiles/vm.make --- a/make/bsd/makefiles/vm.make Wed Dec 12 15:46:11 2012 +0100 +++ b/make/bsd/makefiles/vm.make Wed Dec 12 21:36:40 
2012 +0100 @@ -180,9 +180,9 @@ # Include dirs per type. Src_Dirs/CORE := $(CORE_PATHS) -Src_Dirs/COMPILER1 := $(CORE_PATHS) $(COMPILER1_PATHS) -Src_Dirs/COMPILER2 := $(CORE_PATHS) $(COMPILER2_PATHS) -Src_Dirs/TIERED := $(CORE_PATHS) $(COMPILER1_PATHS) $(COMPILER2_PATHS) +Src_Dirs/COMPILER1 := $(CORE_PATHS) $(COMPILER1_PATHS) $(GRAAL_PATHS) +Src_Dirs/COMPILER2 := $(CORE_PATHS) $(COMPILER2_PATHS) $(GRAAL_PATHS) +Src_Dirs/TIERED := $(CORE_PATHS) $(COMPILER1_PATHS) $(COMPILER2_PATHS) $(GRAAL_PATHS) Src_Dirs/ZERO := $(CORE_PATHS) Src_Dirs/SHARK := $(CORE_PATHS) $(SHARK_PATHS) Src_Dirs/GRAAL := $(CORE_PATHS) $(GRAAL_PATHS) @@ -192,7 +192,12 @@ COMPILER1_SPECIFIC_FILES := c1_\* SHARK_SPECIFIC_FILES := shark ZERO_SPECIFIC_FILES := zero -GRAAL_SPECIFIC_FILES := graal\* + +ifdef OMIT_GRAAL + GRAAL_SPECIFIC_FILES := graal\* +else + GRAAL_SPECIFIC_FILES := +endif # Always exclude these. Src_Files_EXCLUDE += jsig.c jvmtiEnvRecommended.cpp jvmtiEnvStub.cpp diff -r c421c19b7bf8 -r 5d0bb7d52783 make/linux/makefiles/compiler1.make --- a/make/linux/makefiles/compiler1.make Wed Dec 12 15:46:11 2012 +0100 +++ b/make/linux/makefiles/compiler1.make Wed Dec 12 21:36:40 2012 +0100 @@ -29,3 +29,7 @@ VM_SUBDIR = client CFLAGS += -DCOMPILER1 + +ifndef OMIT_GRAAL + CFLAGS += -DGRAAL +endif \ No newline at end of file diff -r c421c19b7bf8 -r 5d0bb7d52783 make/linux/makefiles/compiler2.make --- a/make/linux/makefiles/compiler2.make Wed Dec 12 15:46:11 2012 +0100 +++ b/make/linux/makefiles/compiler2.make Wed Dec 12 21:36:40 2012 +0100 @@ -29,3 +29,7 @@ VM_SUBDIR = server CFLAGS += -DCOMPILER2 + +ifndef OMIT_GRAAL + CFLAGS += -DGRAAL +endif \ No newline at end of file diff -r c421c19b7bf8 -r 5d0bb7d52783 make/linux/makefiles/tiered.make --- a/make/linux/makefiles/tiered.make Wed Dec 12 15:46:11 2012 +0100 +++ b/make/linux/makefiles/tiered.make Wed Dec 12 21:36:40 2012 +0100 @@ -29,3 +29,7 @@ VM_SUBDIR = server CFLAGS += -DCOMPILER2 -DCOMPILER1 + +ifndef OMIT_GRAAL + CFLAGS += -DGRAAL +endif \ No newline at end of file diff -r c421c19b7bf8 -r 5d0bb7d52783 make/linux/makefiles/vm.make --- a/make/linux/makefiles/vm.make Wed Dec 12 15:46:11 2012 +0100 +++ b/make/linux/makefiles/vm.make Wed Dec 12 21:36:40 2012 +0100 @@ -182,9 +182,9 @@ # Include dirs per type. Src_Dirs/CORE := $(CORE_PATHS) -Src_Dirs/COMPILER1 := $(CORE_PATHS) $(COMPILER1_PATHS) -Src_Dirs/COMPILER2 := $(CORE_PATHS) $(COMPILER2_PATHS) -Src_Dirs/TIERED := $(CORE_PATHS) $(COMPILER1_PATHS) $(COMPILER2_PATHS) +Src_Dirs/COMPILER1 := $(CORE_PATHS) $(COMPILER1_PATHS) $(GRAAL_PATHS) +Src_Dirs/COMPILER2 := $(CORE_PATHS) $(COMPILER2_PATHS) $(GRAAL_PATHS) +Src_Dirs/TIERED := $(CORE_PATHS) $(COMPILER1_PATHS) $(COMPILER2_PATHS) $(GRAAL_PATHS) Src_Dirs/ZERO := $(CORE_PATHS) Src_Dirs/SHARK := $(CORE_PATHS) $(SHARK_PATHS) Src_Dirs/GRAAL := $(CORE_PATHS) $(GRAAL_PATHS) @@ -194,7 +194,12 @@ COMPILER1_SPECIFIC_FILES := c1_\* SHARK_SPECIFIC_FILES := shark ZERO_SPECIFIC_FILES := zero -GRAAL_SPECIFIC_FILES := graal\* + +ifdef OMIT_GRAAL + GRAAL_SPECIFIC_FILES := graal\* +else + GRAAL_SPECIFIC_FILES := +endif # Always exclude these. 
Src_Files_EXCLUDE += jsig.c jvmtiEnvRecommended.cpp jvmtiEnvStub.cpp diff -r c421c19b7bf8 -r 5d0bb7d52783 mx/commands.py --- a/mx/commands.py Wed Dec 12 15:46:11 2012 +0100 +++ b/mx/commands.py Wed Dec 12 21:36:40 2012 +0100 @@ -550,6 +550,8 @@ env.setdefault('LANG', 'C') env.setdefault('HOTSPOT_BUILD_JOBS', str(cpus)) env['ALT_BOOTDIR'] = jdk + if not env.has_key('OMIT_GRAAL'): + env['GRAAL'] = join(_graal_home, 'graal') # needed for TEST_IN_BUILD env.setdefault('INSTALL', 'y') if mx.get_os() == 'solaris' : # If using sparcWorks, setup flags to avoid make complaining about CC version diff -r c421c19b7bf8 -r 5d0bb7d52783 src/cpu/x86/vm/graalGlobals_x86.hpp --- a/src/cpu/x86/vm/graalGlobals_x86.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/cpu/x86/vm/graalGlobals_x86.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -31,6 +31,13 @@ // Sets the default values for platform dependent flags used by the Graal compiler. // (see graalGlobals.hpp) +#ifndef COMPILER2 +define_pd_global(intx, TypeProfileWidth, 8); +#endif + +define_pd_global(intx, GraalSafepointPollOffset, 256 ); + +#if !defined(COMPILER1) && !defined(COMPILER2) define_pd_global(bool, BackgroundCompilation, true ); define_pd_global(bool, UseTLAB, true ); define_pd_global(bool, ResizeTLAB, true ); @@ -44,27 +51,17 @@ define_pd_global(intx, NewSizeThreadIncrease, 4*K ); define_pd_global(uintx,MetaspaceSize, 12*M ); define_pd_global(uintx,MaxPermSize, 64*M ); -define_pd_global(bool, NeverActAsServerClassMachine, true ); +define_pd_global(bool, NeverActAsServerClassMachine, false); define_pd_global(uint64_t,MaxRAM, 1ULL*G); define_pd_global(bool, CICompileOSR, true ); - define_pd_global(bool, ProfileTraps, true ); -define_pd_global(bool, UseOnStackReplacement, true); -define_pd_global(intx, CompileThreshold, 10000 ); -define_pd_global(intx, InitialCodeCacheSize, 16*M ); +define_pd_global(bool, UseOnStackReplacement, true ); +define_pd_global(intx, CompileThreshold, 10000); +define_pd_global(intx, InitialCodeCacheSize, 16*M ); define_pd_global(intx, ReservedCodeCacheSize, 64*M ); define_pd_global(bool, ProfileInterpreter, true ); define_pd_global(intx, CodeCacheExpansionSize, 64*K ); define_pd_global(uintx,CodeCacheMinBlockLength, 4); -define_pd_global(intx, TypeProfileWidth, 8); - -define_pd_global(bool, RoundFPResults, true ); - -define_pd_global(bool, LIRFillDelaySlots, false); -define_pd_global(bool, OptimizeSinglePrecision, true ); -define_pd_global(bool, CSEArrayLength, false); -define_pd_global(bool, TwoOperandLIRForm, true ); - -define_pd_global(intx, SafepointPollOffset, 256 ); +#endif #endif // CPU_X86_VM_GRAALGLOBALS_X86_HPP diff -r c421c19b7bf8 -r 5d0bb7d52783 src/cpu/x86/vm/graalRuntime_x86.cpp --- a/src/cpu/x86/vm/graalRuntime_x86.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/cpu/x86/vm/graalRuntime_x86.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -111,23 +111,12 @@ if (metadata_result->is_valid()) { movptr(Address(thread, JavaThread::vm_result_2_offset()), NULL_WORD); } -#ifdef GRAAL // (thomaswue) Deoptimize in case of an exception. 
restore_live_registers(this, false); movptr(Address(thread, Thread::pending_exception_offset()), NULL_WORD); leave(); movl(rscratch1, Deoptimization::make_trap_request(Deoptimization::Reason_constraint, Deoptimization::Action_reinterpret)); jump(RuntimeAddress(SharedRuntime::deopt_blob()->uncommon_trap())); -#else - if (frame_size() == no_frame_size) { - leave(); - jump(RuntimeAddress(StubRoutines::forward_exception_entry())); - } else if (_stub_id == GraalRuntime::forward_exception_id) { - should_not_reach_here(); - } else { - jump(RuntimeAddress(GraalRuntime::entry_for(GraalRuntime::forward_exception_id))); - } -#endif bind(L); } // get oop results if there are any and reset the values in the thread diff -r c421c19b7bf8 -r 5d0bb7d52783 src/cpu/x86/vm/interp_masm_x86_64.cpp --- a/src/cpu/x86/vm/interp_masm_x86_64.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/cpu/x86/vm/interp_masm_x86_64.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -1149,17 +1149,15 @@ Register receiver, Register mdp, Register reg2, int start_row, Label& done, bool is_virtual_call) { -#ifdef GRAAL - // change for GRAAL (use counter to indicate polymorphic case instead of failed typechecks) - bool use_counter_for_polymorphic_case = true; -#else - bool use_counter_for_polymorphic_case = is_virtual_call; -#endif - if (TypeProfileWidth == 0) { - if (use_counter_for_polymorphic_case) { + if (is_virtual_call) { increment_mdp_data_at(mdp, in_bytes(CounterData::count_offset())); } +#ifdef GRAAL + else { + increment_mdp_data_at(mdp, in_bytes(ReceiverTypeData::nonprofiled_receiver_count_offset())); + } +#endif return; } @@ -1194,11 +1192,17 @@ testptr(reg2, reg2); if (start_row == last_row) { // The only thing left to do is handle the null case. - if (use_counter_for_polymorphic_case) { + if (is_virtual_call GRAAL_ONLY(|| true)) { jccb(Assembler::zero, found_null); // Receiver did not match any saved receiver and there is no empty row for it. // Increment total counter to indicate polymorphic case. - increment_mdp_data_at(mdp, in_bytes(CounterData::count_offset())); + int offset = in_bytes(CounterData::count_offset()); +#ifdef GRAAL + if (!is_virtual_call) { + offset = in_bytes(ReceiverTypeData::nonprofiled_receiver_count_offset()); + } +#endif + increment_mdp_data_at(mdp, offset); jmp(done); bind(found_null); } else { @@ -1327,8 +1331,6 @@ void InterpreterMacroAssembler::profile_typecheck_failed(Register mdp) { -// changed for GRAAL (use counter to indicate polymorphism instead of failed typechecks) -#ifndef GRAAL if (ProfileInterpreter && TypeProfileCasts) { Label profile_continue; @@ -1344,7 +1346,6 @@ bind (profile_continue); } -#endif } diff -r c421c19b7bf8 -r 5d0bb7d52783 src/cpu/x86/vm/sharedRuntime_x86_64.cpp --- a/src/cpu/x86/vm/sharedRuntime_x86_64.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/cpu/x86/vm/sharedRuntime_x86_64.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -1996,7 +1996,7 @@ int vep_offset = ((intptr_t)__ pc()) - start; -#ifdef GRAAL +#ifdef GRAALVM if (InlineObjectHash && (method->intrinsic_id() == vmIntrinsics::_hashCode || method->intrinsic_id() == vmIntrinsics::_identityHashCode)) { // Object.hashCode can pull the hashCode from the header word // instead of doing a full VM transition once it's been computed. @@ -2038,7 +2038,7 @@ __ bind (slowCase); } -#endif // GRAAL +#endif // GRAALVM // The instruction at the verified entry point must be 5 bytes or longer // because it can be patched on the fly by make_non_entrant. The stack bang @@ -3420,7 +3420,7 @@ __ push(0); // Save everything in sight. 
- RegisterSaver::save_live_registers(masm, 0, &frame_size_in_words); + map = RegisterSaver::save_live_registers(masm, 0, &frame_size_in_words); // Now it is safe to overwrite any register diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/c1/c1_Compiler.cpp --- a/src/share/vm/c1/c1_Compiler.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/c1/c1_Compiler.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -44,7 +44,7 @@ volatile int Compiler::_runtimes = uninitialized; -Compiler::Compiler() { +Compiler::Compiler() : AbstractCompiler(c1) { } diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/c1/c1_globals.hpp --- a/src/share/vm/c1/c1_globals.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/c1/c1_globals.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -56,8 +56,6 @@ // #define C1_FLAGS(develop, develop_pd, product, product_pd, notproduct) \ \ - product(bool, TraceSignals, false, \ - "Trace signals and implicit exception handling") \ /* Printing */ \ notproduct(bool, PrintC1Statistics, false, \ "Print Compiler1 statistics" ) \ diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/code/codeBlob.cpp --- a/src/share/vm/code/codeBlob.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/code/codeBlob.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -384,9 +384,7 @@ _unpack_offset = unpack_offset; _unpack_with_exception = unpack_with_exception_offset; _unpack_with_reexecution = unpack_with_reexecution_offset; -#if defined(COMPILER1) || defined(GRAAL) _unpack_with_exception_in_tls = -1; -#endif } diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/code/nmethod.cpp --- a/src/share/vm/code/nmethod.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/code/nmethod.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -868,27 +868,28 @@ #ifdef GRAAL _graal_installed_code = installed_code(); - - // Graal might not produce any stub sections - if (offsets->value(CodeOffsets::Exceptions) != -1) { - _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions); - } else { - _exception_offset = -1; - } - if (offsets->value(CodeOffsets::Deopt) != -1) { - _deoptimize_offset = code_offset() + offsets->value(CodeOffsets::Deopt); +#endif + if (compiler->is_graal()) { + // Graal might not produce any stub sections + if (offsets->value(CodeOffsets::Exceptions) != -1) { + _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions); + } else { + _exception_offset = -1; + } + if (offsets->value(CodeOffsets::Deopt) != -1) { + _deoptimize_offset = code_offset() + offsets->value(CodeOffsets::Deopt); + } else { + _deoptimize_offset = -1; + } + if (offsets->value(CodeOffsets::DeoptMH) != -1) { + _deoptimize_mh_offset = code_offset() + offsets->value(CodeOffsets::DeoptMH); + } else { + _deoptimize_mh_offset = -1; + } } else { - _deoptimize_offset = -1; - } - if (offsets->value(CodeOffsets::DeoptMH) != -1) { - _deoptimize_mh_offset = code_offset() + offsets->value(CodeOffsets::DeoptMH); - } else { - _deoptimize_mh_offset = -1; - } -#else - // Exception handler and deopt handler are in the stub section - assert(offsets->value(CodeOffsets::Exceptions) != -1, "must be set"); - assert(offsets->value(CodeOffsets::Deopt ) != -1, "must be set"); + // Exception handler and deopt handler are in the stub section + assert(offsets->value(CodeOffsets::Exceptions) != -1, "must be set"); + assert(offsets->value(CodeOffsets::Deopt ) != -1, "must be set"); _exception_offset = _stub_offset + offsets->value(CodeOffsets::Exceptions); _deoptimize_offset = _stub_offset + offsets->value(CodeOffsets::Deopt); @@ -897,7 +898,7 @@ } else { _deoptimize_mh_offset = -1; } 
-#endif + } if (offsets->value(CodeOffsets::UnwindHandler) != -1) { _unwind_handler_offset = code_offset() + offsets->value(CodeOffsets::UnwindHandler); } else { @@ -1241,9 +1242,7 @@ } void nmethod::inc_decompile_count() { -#ifndef GRAAL - if (!is_compiled_by_c2()) return; -#endif + if (!is_compiled_by_c2() && !is_compiled_by_graal()) return; // Could be gated by ProfileTraps, but do not bother... Method* m = method(); if (m == NULL) return; diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/code/nmethod.hpp --- a/src/share/vm/code/nmethod.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/code/nmethod.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -118,8 +118,6 @@ #ifdef GRAAL // Needed to keep nmethods alive that are not the default nmethod for the associated Method. - // This field is initialized to Universe::non_oop_word() so that a non-default nmethod - // is not unloaded between being allocated and having this field set in the Graal specific code oop _graal_installed_code; #endif diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/code/pcDesc.cpp --- a/src/share/vm/code/pcDesc.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/code/pcDesc.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -44,11 +44,11 @@ void PcDesc::print(nmethod* code) { #ifndef PRODUCT ResourceMark rm; -#ifdef GRAAL - tty->print_cr("PcDesc(pc=0x%lx offset=%x bits=%x leaf_graph_id=%d):", real_pc(code), pc_offset(), _flags, leaf_graph_id()); -#else - tty->print_cr("PcDesc(pc=0x%lx offset=%x bits=%x):", real_pc(code), pc_offset(), _flags); -#endif + if (code->is_compiled_by_graal()) { + tty->print_cr("PcDesc(pc=0x%lx offset=%x bits=%x leaf_graph_id=%d):", real_pc(code), pc_offset(), _flags, leaf_graph_id()); + } else { + tty->print_cr("PcDesc(pc=0x%lx offset=%x bits=%x):", real_pc(code), pc_offset(), _flags); + } if (scope_decode_offset() == DebugInformationRecorder::serialized_null) { return; diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/compiler/abstractCompiler.hpp --- a/src/share/vm/compiler/abstractCompiler.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/compiler/abstractCompiler.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -37,12 +37,20 @@ // Used for tracking global state of compiler runtime initialization enum { uninitialized, initializing, initialized }; + // The (closed set) of concrete compiler classes. Using an tag like this + // avoids a confusing use of macros around the definition of the + // 'is_' methods. 
+ enum Type { c1, c2, shark, graal }; + // This method will call the initialization method "f" once (per compiler class/subclass) // and do so without holding any locks void initialize_runtimes(initializer f, volatile int* state); + private: + Type _type; + public: - AbstractCompiler() : _is_initialized(false) {} + AbstractCompiler(Type type) : _is_initialized(false), _type(type) {} // Name of this compiler virtual const char* name() = 0; @@ -53,37 +61,10 @@ virtual bool supports_native() { return true; } virtual bool supports_osr () { return true; } -#if defined(TIERED) || ( !defined(COMPILER1) && !defined(COMPILER2) && !defined(SHARK) && !defined(GRAAL)) - virtual bool is_c1 () { return false; } - virtual bool is_c2 () { return false; } - virtual bool is_shark() { return false; } - virtual bool is_graal() { return false; } -#else -#ifdef COMPILER1 - bool is_c1 () { return true; } - bool is_c2 () { return false; } - bool is_shark() { return false; } - bool is_graal() { return false; } -#endif // COMPILER1 -#ifdef COMPILER2 - bool is_c1 () { return false; } - bool is_c2 () { return true; } - bool is_shark() { return false; } - bool is_graal() { return false; } -#endif // COMPILER2 -#ifdef SHARK - bool is_c1 () { return false; } - bool is_c2 () { return false; } - bool is_shark() { return true; } - bool is_graal() { return false; } -#endif // SHARK -#ifdef GRAAL - bool is_c1 () { return false; } - bool is_c2 () { return false; } - bool is_shark() { return false; } - bool is_graal() { return true; } -#endif // GRAAL -#endif // TIERED + bool is_c1 () { return _type == c1; } + bool is_c2 () { return _type == c2; } + bool is_shark() { return _type == shark; } + bool is_graal() { return _type == graal; } void mark_initialized() { _is_initialized = true; } bool is_initialized() { return _is_initialized; } diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/compiler/compileBroker.cpp --- a/src/share/vm/compiler/compileBroker.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/compiler/compileBroker.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -753,8 +753,13 @@ // Set the interface to the current compiler(s). int c1_count = CompilationPolicy::policy()->compiler_count(CompLevel_simple); int c2_count = CompilationPolicy::policy()->compiler_count(CompLevel_full_optimization); -#if defined(GRAAL) - _compilers[0] = new GraalCompiler(); + +#ifdef GRAAL + GraalCompiler* graal = new GraalCompiler(); +#endif + +#if defined(GRAALVM) + _compilers[0] = graal; c1_count = 0; c2_count = 0; #elif defined(COMPILER1) @@ -971,9 +976,9 @@ // Initialize the compilation queue void CompileBroker::init_compiler_threads(int c1_compiler_count, int c2_compiler_count) { EXCEPTION_MARK; -#if !defined(ZERO) && !defined(SHARK) && !defined(GRAAL) +#if !defined(ZERO) && !defined(SHARK) && !defined(GRAALVM) assert(c2_compiler_count > 0 || c1_compiler_count > 0, "No compilers?"); -#endif // !ZERO && !SHARK +#endif // !ZERO && !SHARK && !GRAALVM if (c2_compiler_count > 0) { _c2_method_queue = new CompileQueue("C2MethodQueue", MethodCompileQueue_lock); } @@ -1024,7 +1029,6 @@ // ------------------------------------------------------------------ // CompileBroker::is_idle bool CompileBroker::is_idle() { -#ifndef GRAAL if (_c2_method_queue != NULL && !_c2_method_queue->is_empty()) { return false; } else if (_c1_method_queue != NULL && !_c1_method_queue->is_empty()) { @@ -1037,7 +1041,6 @@ } } } -#endif // No pending or active compilations. 
return true; } @@ -1122,7 +1125,7 @@ if (InstanceRefKlass::owns_pending_list_lock(JavaThread::current())) { return; } -#ifdef GRAAL +#ifdef GRAALVM if (!JavaThread::current()->is_compiling()) { method->set_queued_for_compilation(); GraalCompiler::instance()->compile_method(method, osr_bci, is_compile_blocking(method, osr_bci)); diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/graal/graalCodeInstaller.cpp --- a/src/share/vm/graal/graalCodeInstaller.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/graal/graalCodeInstaller.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -448,6 +448,22 @@ } } +// If deoptimization happens, the interpreter should reexecute these bytecodes. +// This function mainly helps the compilers to set up the reexecute bit. +static bool bytecode_should_reexecute(Bytecodes::Code code) { + switch (code) { + case Bytecodes::_invokedynamic: + case Bytecodes::_invokevirtual: + case Bytecodes::_invokeinterface: + case Bytecodes::_invokespecial: + case Bytecodes::_invokestatic: + return false; + default: + return true; + } + return true; +} + void CodeInstaller::record_scope(jint pc_offset, oop frame, GrowableArray* objects) { assert(frame->klass() == BytecodeFrame::klass(), "BytecodeFrame expected"); oop caller_frame = BytecodePosition::caller(frame); @@ -463,7 +479,7 @@ reexecute = false; } else { Bytecodes::Code code = Bytecodes::java_code_at(method, method->bcp_from(bci)); - reexecute = Interpreter::bytecode_should_reexecute(code); + reexecute = bytecode_should_reexecute(code); if (frame != NULL) { reexecute = (BytecodeFrame::duringCall(frame) == JNI_FALSE); } @@ -797,7 +813,8 @@ case MARK_POLL_NEAR: { NativeInstruction* ni = nativeInstruction_at(instruction); int32_t* disp = (int32_t*) Assembler::locate_operand(instruction, Assembler::disp32_operand); - intptr_t new_disp = (intptr_t) (os::get_polling_page() + (SafepointPollOffset % os::vm_page_size())) - (intptr_t) ni; + int32_t offset = *disp; // The Java code installed the polling page offset into the disp32 operand + intptr_t new_disp = (intptr_t) (os::get_polling_page() + offset) - (intptr_t) ni; *disp = (int32_t)new_disp; } case MARK_POLL_FAR: @@ -806,43 +823,13 @@ case MARK_POLL_RETURN_NEAR: { NativeInstruction* ni = nativeInstruction_at(instruction); int32_t* disp = (int32_t*) Assembler::locate_operand(instruction, Assembler::disp32_operand); - intptr_t new_disp = (intptr_t) (os::get_polling_page() + (SafepointPollOffset % os::vm_page_size())) - (intptr_t) ni; + int32_t offset = *disp; // The Java code installed the polling page offset into the disp32 operand + intptr_t new_disp = (intptr_t) (os::get_polling_page() + offset) - (intptr_t) ni; *disp = (int32_t)new_disp; } case MARK_POLL_RETURN_FAR: _instructions->relocate(instruction, relocInfo::poll_return_type); break; - case MARK_KLASS_PATCHING: - case MARK_ACCESS_FIELD_PATCHING: { - unsigned char* byte_count = (unsigned char*) (instruction - 1); - unsigned char* byte_skip = (unsigned char*) (instruction - 2); - unsigned char* being_initialized_entry_offset = (unsigned char*) (instruction - 3); - - assert(*byte_skip == 5, "unexpected byte_skip"); - - assert(references->length() == 2, "MARK_KLASS_PATCHING/MARK_ACCESS_FIELD_PATCHING needs 2 references"); - oop ref1 = ((oop*) references->base(T_OBJECT))[0]; - oop ref2 = ((oop*) references->base(T_OBJECT))[1]; - int i_byte_count = CompilationResult_Site::pcOffset(ref2) - CompilationResult_Site::pcOffset(ref1); - assert(i_byte_count == (unsigned char)i_byte_count, "invalid offset"); - *byte_count = i_byte_count; - 
*being_initialized_entry_offset = *byte_count + *byte_skip; - - // we need to correct the offset of a field access - it's created with MAX_INT to ensure the correct size, and HotSpot expects 0 - if (id == MARK_ACCESS_FIELD_PATCHING) { - NativeMovRegMem* inst = nativeMovRegMem_at(_instructions->start() + CompilationResult_Site::pcOffset(ref1)); - assert(inst->offset() == max_jint, "unexpected offset value"); - inst->set_offset(0); - } - break; - } - case MARK_DUMMY_OOP_RELOCATION: { - _instructions->relocate(instruction, oop_Relocation::spec_for_immediate(), Assembler::imm_operand); - - RelocIterator iter(_instructions, (address) instruction, (address) (instruction + 1)); - relocInfo::change_reloc_info_for_address(&iter, (address) instruction, relocInfo::oop_type, relocInfo::none); - break; - } default: ShouldNotReachHere(); break; diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/graal/graalCodeInstaller.hpp --- a/src/share/vm/graal/graalCodeInstaller.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/graal/graalCodeInstaller.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -49,9 +49,6 @@ MARK_POLL_RETURN_NEAR = 0x3002, MARK_POLL_FAR = 0x3003, MARK_POLL_RETURN_FAR = 0x3004, - MARK_KLASS_PATCHING = 0x4000, - MARK_DUMMY_OOP_RELOCATION = 0x4001, - MARK_ACCESS_FIELD_PATCHING = 0x4002 }; Arena _arena; diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/graal/graalCompiler.cpp --- a/src/share/vm/graal/graalCompiler.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/graal/graalCompiler.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -36,7 +36,7 @@ GraalCompiler* GraalCompiler::_instance = NULL; -GraalCompiler::GraalCompiler() { +GraalCompiler::GraalCompiler() : AbstractCompiler(graal) { _initialized = false; assert(_instance == NULL, "only one instance allowed"); _instance = this; diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/graal/graalCompilerToVM.cpp --- a/src/share/vm/graal/graalCompilerToVM.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/graal/graalCompilerToVM.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -589,15 +589,13 @@ guarantee(HeapWordSize == sizeof(char*), "Graal assumption that HeadWordSize == machine word size is wrong"); #ifdef _WIN64 - set_boolean(env, config, "windowsOs", true); + set_boolean("windowsOs", true); #else set_boolean("windowsOs", false); #endif set_boolean("verifyOops", VerifyOops); - set_boolean("useFastLocking", UseFastLocking); - set_boolean("useFastNewObjectArray", UseFastNewObjectArray); + set_boolean("useFastLocking", GraalUseFastLocking); set_boolean("useBiasedLocking", UseBiasedLocking); - set_boolean("useFastNewTypeArray", UseFastNewTypeArray); set_boolean("useTLAB", UseTLAB); set_int("codeEntryAlignment", CodeEntryAlignment); set_int("vmPageSize", os::vm_page_size()); @@ -628,7 +626,6 @@ set_int("klassHasFinalizerFlag", JVM_ACC_HAS_FINALIZER); set_int("threadExceptionOopOffset", in_bytes(JavaThread::exception_oop_offset())); set_int("threadExceptionPcOffset", in_bytes(JavaThread::exception_pc_offset())); - set_long("safepointPollingAddress", (jlong)(os::get_polling_page() + (SafepointPollOffset % os::vm_page_size()))); set_boolean("isPollingPageFar", Assembler::is_polling_page_far()); set_int("classMirrorOffset", in_bytes(Klass::java_mirror_offset())); set_int("runtimeCallStackSize", (jint)frame::arg_reg_save_area_bytes); @@ -710,6 +707,26 @@ set_long("logObjectStub", VmIds::addStub(GraalRuntime::entry_for(GraalRuntime::graal_log_object_id))); set_long("logPrintfStub", VmIds::addStub(GraalRuntime::entry_for(GraalRuntime::graal_log_printf_id))); + 
set_int("deoptReasonNone", Deoptimization::Reason_none); + set_int("deoptReasonNullCheck", Deoptimization::Reason_null_check); + set_int("deoptReasonRangeCheck", Deoptimization::Reason_range_check); + set_int("deoptReasonClassCheck", Deoptimization::Reason_class_check); + set_int("deoptReasonArrayCheck", Deoptimization::Reason_array_check); + set_int("deoptReasonUnreached0", Deoptimization::Reason_unreached0); + set_int("deoptReasonTypeCheckInlining", Deoptimization::Reason_type_checked_inlining); + set_int("deoptReasonOptimizedTypeCheck", Deoptimization::Reason_optimized_type_check); + set_int("deoptReasonNotCompiledExceptionHandler", Deoptimization::Reason_not_compiled_exception_handler); + set_int("deoptReasonUnresolved", Deoptimization::Reason_unresolved); + set_int("deoptReasonJsrMismatch", Deoptimization::Reason_jsr_mismatch); + set_int("deoptReasonDiv0Check", Deoptimization::Reason_div0_check); + set_int("deoptReasonConstraint", Deoptimization::Reason_constraint); + + set_int("deoptActionNone", Deoptimization::Action_none); + set_int("deoptActionMaybeRecompile", Deoptimization::Action_maybe_recompile); + set_int("deoptActionReinterpret", Deoptimization::Action_reinterpret); + set_int("deoptActionMakeNotEntrant", Deoptimization::Action_make_not_entrant); + set_int("deoptActionMakeNotCompilable", Deoptimization::Action_make_not_compilable); + BarrierSet* bs = Universe::heap()->barrier_set(); switch (bs->kind()) { diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/graal/graalEnv.cpp --- a/src/share/vm/graal/graalEnv.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/graal/graalEnv.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -442,6 +442,12 @@ MethodData* mdp = method()->method_data(); if (mdp != NULL) { mdp->inc_decompile_count(); + if (mdp->decompile_count() > (uint)PerMethodRecompilationCutoff) { + // TODO (chaeubl) enable this in the fastdebug build only once we are more stable + ResourceMark m; + tty->print_cr("WARN: endless recompilation of %s. Method was set to not compilable.", method()->name_and_sig_as_C_string()); + //ShouldNotReachHere(); + } } // All buffers in the CodeBuffer are allocated in the CodeCache. diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/graal/graalGlobals.hpp --- a/src/share/vm/graal/graalGlobals.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/graal/graalGlobals.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -40,14 +40,16 @@ #endif // -// Defines all global flags used by the Graal compiler. +// Defines all global flags used by the Graal compiler. Only flags that need +// to be accessible to the Graal C++ code should be defined here. All other +// Graal flags should be defined in GraalOptions.java. 
// -#define GRAAL_FLAGS(develop, develop_pd, product, product_pd, notproduct) \ +#define GRAAL_FLAGS(develop, develop_pd, product, product_pd, notproduct) \ \ product(bool, DebugGraal, true, \ "Enable JVMTI for the compiler thread") \ \ - product(bool, BootstrapGraal, true, \ + product(bool, BootstrapGraal, GRAALVM_ONLY(true) NOT_GRAALVM(false), \ "Bootstrap Graal before running Java main method") \ \ product(ccstr, GraalClassPath, NULL, \ @@ -56,29 +58,20 @@ product(intx, TraceGraal, 0, \ "Trace level for Graal") \ \ - product(bool, TraceSignals, false, \ - "Trace signals and implicit exception handling") \ + develop(bool, GraalUseFastLocking, true, \ + "Use fast inlined locking code") \ \ - product_pd(intx, SafepointPollOffset, \ - "Offset added to polling address (Intel only)") \ - \ - develop(bool, UseFastNewInstance, true, \ - "Use fast inlined instance allocation") \ - \ - develop(bool, UseFastNewTypeArray, true, \ + develop(bool, GraalUseFastNewTypeArray, true, \ "Use fast inlined type array allocation") \ \ - develop(bool, UseFastNewObjectArray, true, \ + develop(bool, GraalUseFastNewObjectArray, true, \ "Use fast inlined object array allocation") \ \ - develop(bool, UseFastLocking, true, \ - "Use fast inlined locking code") \ - \ develop(intx, GraalNMethodSizeLimit, (64*K)*wordSize, \ "Maximum size of a compiled method.") \ \ - notproduct(bool, PrintSimpleStubs, false, \ - "Print SimpleStubs") \ + notproduct(bool, GraalPrintSimpleStubs, false, \ + "Print simple Graal stubs") \ \ diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/graal/graalRuntime.cpp --- a/src/share/vm/graal/graalRuntime.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/graal/graalRuntime.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -119,7 +119,7 @@ // generate code for runtime stub OopMapSet* oop_maps; oop_maps = generate_code_for(id, sasm); - assert(oop_maps == NULL || sasm->frame_size() != no_frame_size, + assert(oop_maps == NULL || sasm->frame_size() != GraalStubAssembler::no_frame_size, "if stub has an oop map it must have a valid frame size"); #ifdef ASSERT @@ -130,14 +130,12 @@ #if defined(SPARC) || defined(PPC) case handle_exception_nofpu_id: // Unused on sparc #endif -#ifdef GRAAL case graal_verify_oop_id: case graal_unwind_exception_call_id: case graal_OSR_migration_end_id: case graal_arithmetic_frem_id: case graal_arithmetic_drem_id: case graal_set_deopt_info_id: -#endif break; // All other stubs should have oopmaps @@ -168,7 +166,7 @@ for (int id = 0; id < number_of_ids; id++) generate_blob_for(blob, (StubID)id); // printing #ifndef PRODUCT - if (PrintSimpleStubs) { + if (GraalPrintSimpleStubs) { ResourceMark rm; for (int id = 0; id < number_of_ids; id++) { _blobs[id]->print(); @@ -469,7 +467,7 @@ // Retry fast entry if bias is revoked to avoid unnecessary inflation ObjectSynchronizer::fast_enter(h_obj, lock, true, CHECK); } else { - if (UseFastLocking) { + if (GraalUseFastLocking) { // When using fast locking, the compiled code has already tried the fast case ObjectSynchronizer::slow_enter(h_obj, lock, THREAD); } else { @@ -500,7 +498,7 @@ } #endif - if (UseFastLocking) { + if (GraalUseFastLocking) { // When using fast locking, the compiled code has already tried the fast case ObjectSynchronizer::slow_exit(obj, lock, THREAD); } else { diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/graal/graalRuntime.hpp --- a/src/share/vm/graal/graalRuntime.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/graal/graalRuntime.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -43,6 +43,11 @@ int _stub_id; public: + + enum { + 
no_frame_size = -1 + }; + // creation GraalStubAssembler(CodeBuffer* code, const char * name, int stub_id); void set_info(const char* name, bool must_gc_arguments); @@ -70,9 +75,6 @@ // set frame size and return address offset to these values in blobs // (if the compiled frame uses ebp as link pointer on IA; otherwise, // the frame size must be fixed) -enum { - no_frame_size = -1 -}; // Holds all assembly stubs and VM // runtime routines needed by code code generated diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/interpreter/interpreter.cpp --- a/src/share/vm/interpreter/interpreter.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/interpreter/interpreter.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -394,26 +394,6 @@ return Interpreter::deopt_entry(vtos, 0); } -#ifdef GRAAL - - -// If deoptimization happens, the interpreter should reexecute these bytecodes. -// This function mainly helps the compilers to set up the reexecute bit. -bool AbstractInterpreter::bytecode_should_reexecute(Bytecodes::Code code) { - switch (code) { - case Bytecodes::_invokedynamic: - case Bytecodes::_invokevirtual: - case Bytecodes::_invokeinterface: - case Bytecodes::_invokespecial: - case Bytecodes::_invokestatic: - return false; - default: - return true; - } - return true; -} -#else - // If deoptimization happens, the interpreter should reexecute these bytecodes. // This function mainly helps the compilers to set up the reexecute bit. bool AbstractInterpreter::bytecode_should_reexecute(Bytecodes::Code code) { @@ -452,7 +432,7 @@ case Bytecodes::_getstatic : case Bytecodes::_putstatic : case Bytecodes::_aastore : -#if defined(COMPILER1) || defined(GRAAL) +#if defined(COMPILER1) //special case of reexecution case Bytecodes::_athrow : @@ -463,7 +443,6 @@ return false; } } -#endif void AbstractInterpreterGenerator::bang_stack_shadow_pages(bool native_call) { // Quick & dirty stack overflow checking: bang the stack & handle trap. diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/interpreter/interpreterRuntime.cpp --- a/src/share/vm/interpreter/interpreterRuntime.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/interpreter/interpreterRuntime.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -433,11 +433,13 @@ } } while (should_repeat == true); -#ifdef GRAAL +#ifdef GRAALVM if (h_method->method_data() != NULL) { ResourceMark rm(thread); ProfileData* pdata = h_method->method_data()->allocate_bci_to_data(current_bci); if (pdata != NULL) { + // We re-purpose the DS_RECOMPILE_BIT to record that an exception was thrown at + // the current bci. 
int tstate0 = pdata->trap_state(); int tstate1 = Deoptimization::trap_state_set_recompiled(tstate0, true); if (tstate1 != tstate0) { diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/interpreter/rewriter.cpp --- a/src/share/vm/interpreter/rewriter.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/interpreter/rewriter.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -122,9 +122,7 @@ Bytecodes::Code opcode = bcs.raw_next(); switch (opcode) { case Bytecodes::_return: -#ifndef GRAAL *bcs.bcp() = Bytecodes::_return_register_finalizer; -#endif break; case Bytecodes::_istore: @@ -325,7 +323,6 @@ switch (c) { case Bytecodes::_lookupswitch : { #ifndef CC_INTERP -#ifndef GRAAL Bytecode_lookupswitch bc(method, bcp); (*bcp) = ( bc.number_of_pairs() < BinarySwitchThreshold @@ -333,7 +330,6 @@ : Bytecodes::_fast_binaryswitch ); #endif -#endif break; } case Bytecodes::_fast_linearswitch: diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/interpreter/templateTable.cpp --- a/src/share/vm/interpreter/templateTable.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/interpreter/templateTable.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -429,11 +429,7 @@ def(Bytecodes::_jsr , ubcp|disp|____|____, vtos, vtos, jsr , _ ); // result is not an oop, so do not transition to atos def(Bytecodes::_ret , ubcp|disp|____|____, vtos, vtos, ret , _ ); def(Bytecodes::_tableswitch , ubcp|disp|____|____, itos, vtos, tableswitch , _ ); -#ifdef GRAAL - def(Bytecodes::_lookupswitch , ubcp|disp|____|____, itos, vtos, fast_linearswitch , _ ); -#else def(Bytecodes::_lookupswitch , ubcp|disp|____|____, itos, itos, lookupswitch , _ ); -#endif def(Bytecodes::_ireturn , ____|disp|clvm|____, itos, itos, _return , itos ); def(Bytecodes::_lreturn , ____|disp|clvm|____, ltos, ltos, _return , ltos ); def(Bytecodes::_freturn , ____|disp|clvm|____, ftos, ftos, _return , ftos ); diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/oops/instanceKlass.cpp --- a/src/share/vm/oops/instanceKlass.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/oops/instanceKlass.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -2161,7 +2161,7 @@ void InstanceKlass::clean_method_data(BoolObjectClosure* is_alive) { #if defined(COMPILER2) || defined(GRAAL) - // Currently only used by C2. + // Currently only used by C2 and Graal for (int m = 0; m < methods()->length(); m++) { MethodData* mdo = methods()->at(m)->method_data(); if (mdo != NULL) { diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/oops/method.cpp --- a/src/share/vm/oops/method.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/oops/method.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -121,6 +121,11 @@ backedge_counter()->init(); clear_number_of_breakpoints(); +#ifdef GRAAL + set_graal_invocation_time(0L); + set_graal_priority(0); +#endif + #ifdef TIERED set_rate(0); set_prev_event_count(0); diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/oops/methodData.cpp --- a/src/share/vm/oops/methodData.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/oops/methodData.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -455,7 +455,7 @@ return DataLayout::compute_size_in_bytes(cell_count); } -#ifdef GRAAL +#ifdef GRAALVM int MethodData::compute_extra_data_count(int data_size, int empty_bc_count) { if (!ProfileTraps) return 0; @@ -500,7 +500,7 @@ int extra_data_count = compute_extra_data_count(data_size, empty_bc_count); object_size += extra_data_count * DataLayout::compute_size_in_bytes(0); -#ifndef GRAAL +#ifndef GRAALVM // Add a cell to record information about modified arguments. 
int arg_size = method->size_of_parameters(); object_size += DataLayout::compute_size_in_bytes(arg_size+1); @@ -704,7 +704,7 @@ int extra_size = extra_data_count * DataLayout::compute_size_in_bytes(0); object_size += extra_size; -#ifndef GRAAL +#ifndef GRAALVM // Add a cell to record information about modified arguments. // Set up _args_modified array after traps cells so that // the code for traps cells works. @@ -728,7 +728,7 @@ } bool MethodData::is_empty_data(int size_in_bytes, Bytecodes::Code code) { -#ifdef GRAAL +#ifdef GRAALVM return size_in_bytes == 0 && Bytecodes::can_trap(code); #else return size_in_bytes == 0; @@ -762,12 +762,6 @@ void MethodData::inc_decompile_count() { _nof_decompiles += 1; if (decompile_count() > (uint)PerMethodRecompilationCutoff) { -#ifdef GRAAL - // TODO (chaeubl) enable this in the fastdebug build only once we are more stable - ResourceMark m; - tty->print_cr("WARN: endless recompilation of %s. Method was set to not compilable.", method()->name_and_sig_as_C_string()); - //ShouldNotReachHere(); -#endif method()->set_not_compilable(CompLevel_full_optimization); } } diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/oops/methodData.hpp --- a/src/share/vm/oops/methodData.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/oops/methodData.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -625,7 +625,14 @@ class ReceiverTypeData : public CounterData { protected: enum { +#ifdef GRAAL + // Graal is interested in knowing the percentage of type checks + // involving a type not explicitly in the profile + nonprofiled_receiver_count_off_set = counter_cell_count, + receiver0_offset, +#else receiver0_offset = counter_cell_count, +#endif count0_offset, receiver_type_row_cell_count = (count0_offset + 1) - receiver0_offset }; @@ -639,7 +646,7 @@ virtual bool is_ReceiverTypeData() { return true; } static int static_cell_count() { - return counter_cell_count + (uint) TypeProfileWidth * receiver_type_row_cell_count; + return counter_cell_count + (uint) TypeProfileWidth * receiver_type_row_cell_count GRAAL_ONLY(+ 1); } virtual int cell_count() { @@ -710,6 +717,11 @@ static ByteSize receiver_count_offset(uint row) { return cell_offset(receiver_count_cell_index(row)); } +#ifdef GRAAL + static ByteSize nonprofiled_receiver_count_offset() { + return cell_offset(nonprofiled_receiver_count_off_set); + } +#endif static ByteSize receiver_type_data_size() { return cell_offset(static_cell_count()); } @@ -1168,11 +1180,7 @@ // Whole-method sticky bits and flags enum { -#ifdef GRAAL - _trap_hist_limit = 13, // decoupled from Deoptimization::Reason_LIMIT -#else _trap_hist_limit = 17, // decoupled from Deoptimization::Reason_LIMIT -#endif _trap_hist_mask = max_jubyte, _extra_data_count = 4 // extra DataLayout headers, for trap history }; // Public flag values diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/opto/c2compiler.hpp --- a/src/share/vm/opto/c2compiler.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/opto/c2compiler.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -33,6 +33,8 @@ static void initialize_runtime(); public: + C2Compiler() : AbstractCompiler(c2) {} + // Name const char *name() { return "C2"; } diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/runtime/arguments.cpp --- a/src/share/vm/runtime/arguments.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/runtime/arguments.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -3442,9 +3442,12 @@ // which are subtlely different from each other but neither works with // biased locking. 
if (UseHeavyMonitors -#if defined(COMPILER1) || defined(GRAAL) +#ifdef COMPILER1 || !UseFastLocking #endif // COMPILER1 +#ifdef GRAAL + || !GraalUseFastLocking +#endif // GRAAL ) { if (!FLAG_IS_DEFAULT(UseBiasedLocking) && UseBiasedLocking) { // flag set to true on command line; warn the user that they diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/runtime/compilationPolicy.cpp --- a/src/share/vm/runtime/compilationPolicy.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/runtime/compilationPolicy.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -60,7 +60,7 @@ break; case 1: -#if defined(COMPILER2) || defined(GRAAL) +#if defined(COMPILER2) CompilationPolicy::set_policy(new StackWalkCompPolicy()); #else Unimplemented(); @@ -81,7 +81,7 @@ #endif break; case 4: -#if defined(GRAAL) +#if defined(GRAALVM) CompilationPolicy::set_policy(new GraalCompPolicy()); #else Unimplemented(); @@ -188,7 +188,7 @@ #endif #ifdef COMPILER1 - GRAAL_ONLY(ShouldNotReachHere();) + GRAALVM_ONLY(ShouldNotReachHere();) if (is_c1_compile(comp_level)) { return _compiler_count; } else { @@ -442,7 +442,7 @@ // GraalCompPolicy - compile current method -#ifdef GRAAL +#ifdef GRAALVM void GraalCompPolicy::method_invocation_event(methodHandle m, JavaThread* thread) { int hot_count = m->invocation_count(); @@ -499,12 +499,12 @@ } } -#endif // GRAAL +#endif // GRAALVM // StackWalkCompPolicy - walk up stack to find a suitable method to compile -#if defined(COMPILER2) || defined(GRAAL) +#if defined(COMPILER2) const char* StackWalkCompPolicy::_msg = NULL; @@ -731,4 +731,4 @@ -#endif // COMPILER2 || GRAAL +#endif // COMPILER2 diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/runtime/compilationPolicy.hpp --- a/src/share/vm/runtime/compilationPolicy.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/runtime/compilationPolicy.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -116,17 +116,17 @@ virtual void method_back_branch_event(methodHandle m, int bci, JavaThread* thread); }; -#ifdef GRAAL +#ifdef GRAALVM class GraalCompPolicy : public NonTieredCompPolicy { public: virtual void method_invocation_event(methodHandle m, JavaThread* thread); virtual void method_back_branch_event(methodHandle m, int bci, JavaThread* thread); }; -#endif // GRAAL +#endif // GRAALVM // StackWalkCompPolicy - existing C2 policy -#if defined(COMPILER2) || defined(GRAAL) +#if defined(COMPILER2) class StackWalkCompPolicy : public NonTieredCompPolicy { public: virtual void method_invocation_event(methodHandle m, JavaThread* thread); diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/runtime/deoptimization.cpp --- a/src/share/vm/runtime/deoptimization.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/runtime/deoptimization.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -1310,8 +1310,8 @@ if (thread->graal_deopt_info() != NULL) { oop deopt_info = thread->graal_deopt_info(); if (java_lang_String::is_instance(deopt_info)) { - char buf[1024]; - java_lang_String::as_utf8_string(deopt_info, buf, 1024); + char buf[O_BUFLEN]; + java_lang_String::as_utf8_string(deopt_info, buf, O_BUFLEN); tty->print_cr("deopt info: %s", buf); } else { tty->print_cr("deopt info:"); @@ -1856,40 +1856,24 @@ Deoptimization::DeoptAction Deoptimization::_unloaded_action = Deoptimization::Action_reinterpret; const char* Deoptimization::_trap_reason_name[Reason_LIMIT] = { -#ifdef GRAAL + // Note: Keep this in sync. with enum DeoptReason. 
"none", "null_check", + "null_assert" GRAAL_ONLY("|unreached0"), "range_check", "class_check", "array_check", + "intrinsic" GRAAL_ONLY("|type_checked_inlining"), + "bimorphic" GRAAL_ONLY("|optimized_type_check"), + "unloaded", + "uninitialized" GRAAL_ONLY("|unresolved"), "unreached", - "type_checked_inlining", - "optimized_type_check", - "not_compiled_exception_handler", - "unresolved", - "jsr_mismatch", - "div0_check", - "constraint" -#else - // Note: Keep this in sync. with enum DeoptReason. - "none", - "null_check", - "null_assert", - "range_check", - "class_check", - "array_check", - "intrinsic", - "bimorphic", - "unloaded", - "uninitialized", - "unreached", - "unhandled", + "unhandled" GRAAL_ONLY("|not_compiled_exception_handler"), "constraint", "div0_check", - "age", + "age" GRAAL_ONLY("|jsr_mismatch"), "predicate", "loop_limit_check" -#endif }; const char* Deoptimization::_trap_action_name[Action_LIMIT] = { // Note: Keep this in sync. with enum DeoptAction. diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/runtime/deoptimization.hpp --- a/src/share/vm/runtime/deoptimization.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/runtime/deoptimization.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -39,7 +39,6 @@ public: // What condition caused the deoptimization enum DeoptReason { -#ifdef GRAAL Reason_many = -1, // indicates presence of several reasons Reason_none = 0, // indicates absence of a relevant deopt. // Next 7 reasons are recorded per bytecode in DataLayout::trap_bits. @@ -49,26 +48,6 @@ // - bytecode y in method b() causes deopt // - Graal deoptimizes to bytecode x in method a() // -> the deopt reason will be recorded for method a() at bytecode x - Reason_null_check, - Reason_range_check, - Reason_class_check, - Reason_array_check, - Reason_unreached, - Reason_type_checked_inlining, - Reason_optimized_type_check, - - // recorded per method - Reason_not_compiled_exception_handler, - Reason_unresolved, - Reason_jsr_mismatch, - Reason_div0_check, - Reason_constraint, - Reason_LIMIT, - Reason_RECORDED_LIMIT = Reason_optimized_type_check -#else - Reason_many = -1, // indicates presence of several reasons - Reason_none = 0, // indicates absence of a relevant deopt. - // Next 7 reasons are recorded per bytecode in DataLayout::trap_bits Reason_null_check, // saw unexpected null or zero divisor (@bci) Reason_null_assert, // saw unexpected non-null or non-zero (@bci) Reason_range_check, // saw unexpected array index (@bci) @@ -77,6 +56,13 @@ Reason_intrinsic, // saw unexpected operand to intrinsic (@bci) Reason_bimorphic, // saw unexpected object class in bimorphic inlining (@bci) +#ifdef GRAAL + Reason_unreached0 = Reason_null_assert, + Reason_type_checked_inlining = Reason_intrinsic, + Reason_optimized_type_check = Reason_bimorphic, +#endif + + // recorded per method Reason_unloaded, // unloaded or class constant pool entry Reason_uninitialized, // bad class state (uninitialized) Reason_unreached, // code is not reached, compiler @@ -87,8 +73,14 @@ Reason_predicate, // compiler generated predicate failed Reason_loop_limit_check, // compiler generated loop limits check failed Reason_LIMIT, + +#ifdef GRAAL + Reason_not_compiled_exception_handler = Reason_unhandled, + Reason_unresolved = Reason_uninitialized, + Reason_jsr_mismatch = Reason_age, +#endif + Reason_RECORDED_LIMIT = Reason_bimorphic // some are not recorded per bc -#endif // GRAAL // Note: Keep this enum in sync. with _trap_reason_name. // Note: Reason_RECORDED_LIMIT should be < 8 to fit into 3 bits of // DataLayout::trap_bits. 
This dependency is enforced indirectly @@ -283,7 +275,7 @@ return (DeoptReason) ((~(trap_request) >> _reason_shift) & right_n_bits(_reason_bits)); } else { -#ifdef GRAAL +#ifdef GRAALVM ShouldNotReachHere(); return Reason_none; #else @@ -297,7 +289,7 @@ return (DeoptAction) ((~(trap_request) >> _action_shift) & right_n_bits(_action_bits)); } else { -#ifdef GRAAL +#ifdef GRAALVM ShouldNotReachHere(); return Action_make_not_compilable; #else @@ -310,7 +302,7 @@ if (trap_request < 0) { return -1; } else { -#ifdef GRAAL +#ifdef GRAALVM ShouldNotReachHere(); return -1; #else @@ -320,7 +312,7 @@ } static int make_trap_request(DeoptReason reason, DeoptAction action, int index = -1) { -#ifdef GRAAL +#ifdef GRAALVM assert(index == -1, "Graal does not use index"); #endif diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/runtime/globals.hpp --- a/src/share/vm/runtime/globals.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/runtime/globals.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -1219,6 +1219,9 @@ notproduct(bool, TraceJVMCalls, false, \ "Trace JVM calls") \ \ + product(bool, TraceSignals, false, \ + "Trace signals and implicit exception handling") \ + \ product(ccstr, TraceJVMTI, NULL, \ "Trace flags for JVMTI functions and events") \ \ @@ -2373,7 +2376,7 @@ product(intx, CICompilerCount, CI_COMPILER_COUNT, \ "Number of compiler threads to run") \ \ - product(intx, CompilationPolicyChoice, NOT_GRAAL(0) GRAAL_ONLY(4), \ + product(intx, CompilationPolicyChoice, NOT_GRAALVM(0) GRAALVM_ONLY(4), \ "which compilation policy (0/1)") \ \ develop(bool, UseStackBanging, true, \ diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/runtime/mutexLocker.cpp --- a/src/share/vm/runtime/mutexLocker.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/runtime/mutexLocker.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -292,7 +292,7 @@ def(JfrStream_lock , Mutex, nonleaf+4, true); def(PeriodicTask_lock , Monitor, nonleaf+5, true); #ifdef GRAAL - def(GraalDeoptLeafGraphIds_lock , Mutex, special, true); + def(GraalDeoptLeafGraphIds_lock , Mutex, special, true); #endif // GRAAL } diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/runtime/sharedRuntime.cpp --- a/src/share/vm/runtime/sharedRuntime.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/runtime/sharedRuntime.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -630,24 +630,25 @@ ResourceMark rm; #ifdef GRAAL - // lookup exception handler for this pc - int catch_pco = ret_pc - nm->code_begin(); - ExceptionHandlerTable table(nm); - HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0); - if (t != NULL) { - return nm->code_begin() + t->pco(); - } else { - // there is no exception handler for this pc => deoptimize - nm->make_not_entrant(); - JavaThread* thread = JavaThread::current(); - RegisterMap reg_map(thread); - frame runtime_frame = thread->last_frame(); - frame caller_frame = runtime_frame.sender(&reg_map); - Deoptimization::deoptimize_frame(thread, caller_frame.id(), Deoptimization::Reason_not_compiled_exception_handler); - return SharedRuntime::deopt_blob()->unpack_with_exception_in_tls(); + if (nm->is_compiled_by_graal()) { + // lookup exception handler for this pc + int catch_pco = ret_pc - nm->code_begin(); + ExceptionHandlerTable table(nm); + HandlerTableEntry *t = table.entry_for(catch_pco, -1, 0); + if (t != NULL) { + return nm->code_begin() + t->pco(); + } else { + // there is no exception handler for this pc => deoptimize + nm->make_not_entrant(); + JavaThread* thread = JavaThread::current(); + RegisterMap reg_map(thread); + frame runtime_frame = thread->last_frame(); + 
frame caller_frame = runtime_frame.sender(&reg_map); + Deoptimization::deoptimize_frame(thread, caller_frame.id(), Deoptimization::Reason_not_compiled_exception_handler); + return SharedRuntime::deopt_blob()->unpack_with_exception_in_tls(); + } } - -#else +#endif ScopeDesc* sd = nm->scope_desc_at(ret_pc); // determine handler bci, if any @@ -728,7 +729,6 @@ } return nm->code_begin() + t->pco(); -#endif } JRT_ENTRY(void, SharedRuntime::throw_AbstractMethodError(JavaThread* thread)) @@ -871,9 +871,13 @@ _implicit_null_throws++; #endif #ifdef GRAAL - target_pc = deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_null_check); -#else + if (nm->is_compiled_by_graal()) { + target_pc = deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_null_check); + } else { +#endif target_pc = nm->continuation_for_implicit_exception(pc); +#ifdef GRAAL + } #endif // If there's an unexpected fault, target_pc might be NULL, // in which case we want to fall through into the normal @@ -891,12 +895,16 @@ _implicit_div0_throws++; #endif #ifdef GRAAL - if (TraceSignals) { - tty->print_cr("Graal implicit div0"); + if (nm->is_compiled_by_graal()) { + if (TraceSignals) { + tty->print_cr("Graal implicit div0"); + } + target_pc = deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_div0_check); + } else { +#endif + target_pc = nm->continuation_for_implicit_exception(pc); +#ifdef GRAAL } - target_pc = deoptimize_for_implicit_exception(thread, pc, nm, Deoptimization::Reason_div0_check); -#else - target_pc = nm->continuation_for_implicit_exception(pc); #endif // If there's an unexpected fault, target_pc might be NULL, // in which case we want to fall through into the normal diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/runtime/vframeArray.cpp --- a/src/share/vm/runtime/vframeArray.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/runtime/vframeArray.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -252,7 +252,7 @@ case Deoptimization::Unpack_uncommon_trap: case Deoptimization::Unpack_reexecute: // redo last byte code -#ifdef GRAAL +#ifdef GRAALVM assert(should_reexecute(), ""); #endif pc = Interpreter::deopt_entry(vtos, 0); diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/runtime/vm_version.cpp --- a/src/share/vm/runtime/vm_version.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/runtime/vm_version.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -124,7 +124,7 @@ #define VMTYPE "Zero" #endif // SHARK #else // ZERO - #ifdef GRAAL + #ifdef GRAALVM #define VMTYPE "Graal" #else // GRAAL #define VMTYPE COMPILER1_PRESENT("Client") \ diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/shark/sharkCompiler.cpp --- a/src/share/vm/shark/sharkCompiler.cpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/shark/sharkCompiler.cpp Wed Dec 12 21:36:40 2012 +0100 @@ -60,7 +60,7 @@ #endif SharkCompiler::SharkCompiler() - : AbstractCompiler() { + : AbstractCompiler(shark) { // Create the lock to protect the memory manager and execution engine _execution_engine_lock = new Monitor(Mutex::leaf, "SharkExecutionEngineLock"); MutexLocker locker(execution_engine_lock()); diff -r c421c19b7bf8 -r 5d0bb7d52783 src/share/vm/utilities/macros.hpp --- a/src/share/vm/utilities/macros.hpp Wed Dec 12 15:46:11 2012 +0100 +++ b/src/share/vm/utilities/macros.hpp Wed Dec 12 21:36:40 2012 +0100 @@ -182,9 +182,23 @@ #ifdef GRAAL #define GRAAL_ONLY(code) code #define NOT_GRAAL(code) +#if !defined(COMPILER1) && !defined(COMPILER2) +// Graal is the only compiler in the system and so will be used for compilation +// 
requests issued by the compile broker. +#define GRAALVM +#define GRAALVM_ONLY(code) code +#define NOT_GRAALVM(code) #else +// Graal is not the only compiler in the system and so will only be used for +// compilation requests issued via the Graal API +#define GRAALVM_ONLY(code) +#define NOT_GRAALVM(code) code +#endif +#else // !GRAAL #define GRAAL_ONLY(code) #define NOT_GRAAL(code) code +#define GRAALVM_ONLY(code) +#define NOT_GRAALVM(code) code #endif // GRAAL #ifdef HIGH_LEVEL_INTERPRETER
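
As the comments in the macros.hpp hunk above spell out, GRAAL_ONLY/NOT_GRAAL stay keyed off GRAAL alone (any build that includes Graal support), while the new GRAALVM_ONLY/NOT_GRAALVM family is active only when Graal is the sole compiler and therefore services compile-broker requests. Below is a minimal sketch of how the two families compose, modeled on the CompilationPolicyChoice default in globals.hpp; the helper function is illustrative only and not part of the patch.

#include "utilities/macros.hpp"

// Illustrative only: mirrors the CompilationPolicyChoice default of
// NOT_GRAALVM(0) GRAALVM_ONLY(4) from globals.hpp.
static int example_default_policy_choice() {
  // In a build that also contains C1 and/or C2, GRAALVM is not defined, so
  // NOT_GRAALVM(0) expands to 0 and GRAALVM_ONLY(4) expands to nothing.
  // In a Graal-only build, GRAALVM is defined and the expression yields 4,
  // selecting GraalCompPolicy for compile-broker requests.
  return NOT_GRAALVM(0) GRAALVM_ONLY(4);
}

In a HotSpot build without any Graal support, GRAALVM_ONLY expands to nothing and NOT_GRAALVM passes its argument through, so the same source line still compiles to a plain 0.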