# HG changeset patch
# User Doug Simon
# Date 1397114075 -7200
# Node ID d4a9d6f6e57d1cb4ef73957c1d2d31379742b2d4
# Parent  04f1723150b43388c913fbc61832aeaeaaf41e11
# Parent  d8b9e3761e52c4f49a53e65b0f7a6ec53a1a1a45
Merge.

diff -r 04f1723150b4 -r d4a9d6f6e57d graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotNodeLIRBuilder.java
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotNodeLIRBuilder.java	Thu Apr 10 00:05:24 2014 +0200
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotNodeLIRBuilder.java	Thu Apr 10 09:14:35 2014 +0200
@@ -120,8 +120,7 @@
             assert invokeKind == InvokeKind.Static || invokeKind == InvokeKind.Special;
             HotSpotResolvedJavaMethod resolvedMethod = (HotSpotResolvedJavaMethod) callTarget.target();
             assert !Modifier.isAbstract(resolvedMethod.getModifiers()) : "Cannot make direct call to abstract method.";
-            Constant metaspaceMethod = resolvedMethod.getMetaspaceMethodConstant();
-            append(new AMD64HotspotDirectStaticCallOp(callTarget.target(), result, parameters, temps, callState, invokeKind, metaspaceMethod));
+            append(new AMD64HotspotDirectStaticCallOp(callTarget.target(), result, parameters, temps, callState, invokeKind));
         }
     }
diff -r 04f1723150b4 -r d4a9d6f6e57d graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotspotDirectStaticCallOp.java
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotspotDirectStaticCallOp.java	Thu Apr 10 00:05:24 2014 +0200
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotspotDirectStaticCallOp.java	Thu Apr 10 09:14:35 2014 +0200
@@ -22,14 +22,11 @@
  */
 package com.oracle.graal.hotspot.amd64;
 
-import com.oracle.graal.amd64.*;
 import com.oracle.graal.api.meta.*;
 import com.oracle.graal.asm.amd64.*;
-import com.oracle.graal.hotspot.*;
 import com.oracle.graal.hotspot.meta.HotSpotCodeCacheProvider.MarkId;
 import com.oracle.graal.lir.*;
 import com.oracle.graal.lir.amd64.AMD64Call.DirectCallOp;
-import com.oracle.graal.lir.amd64.*;
 import com.oracle.graal.lir.asm.*;
 import com.oracle.graal.nodes.java.MethodCallTargetNode.InvokeKind;
 
@@ -40,24 +37,17 @@
 @Opcode("CALL_DIRECT")
 final class AMD64HotspotDirectStaticCallOp extends DirectCallOp {
 
-    private final Constant metaspaceMethod;
     private final InvokeKind invokeKind;
 
-    AMD64HotspotDirectStaticCallOp(ResolvedJavaMethod target, Value result, Value[] parameters, Value[] temps, LIRFrameState state, InvokeKind invokeKind, Constant metaspaceMethod) {
+    AMD64HotspotDirectStaticCallOp(ResolvedJavaMethod target, Value result, Value[] parameters, Value[] temps, LIRFrameState state, InvokeKind invokeKind) {
         super(target, result, parameters, temps, state);
         assert invokeKind == InvokeKind.Static || invokeKind == InvokeKind.Special;
-        this.metaspaceMethod = metaspaceMethod;
         this.invokeKind = invokeKind;
     }
 
     @Override
     public void emitCode(CompilationResultBuilder crb, AMD64MacroAssembler masm) {
-        // The mark for an invocation that uses an inline cache must be placed at the
-        // instruction that loads the Klass from the inline cache.
-        AMD64Move.move(crb, masm, AMD64.rbx.asValue(Kind.Long), metaspaceMethod);
         MarkId.recordMark(crb, invokeKind == InvokeKind.Static ? MarkId.INVOKESTATIC : MarkId.INVOKESPECIAL);
-        // This must be emitted exactly like this to ensure it's patchable
-        masm.movq(AMD64.rax, HotSpotGraalRuntime.runtime().getConfig().nonOopBits);
         super.emitCode(crb, masm);
     }
 }
diff -r 04f1723150b4 -r d4a9d6f6e57d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/CompilationTask.java
--- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/CompilationTask.java	Thu Apr 10 00:05:24 2014 +0200
+++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/CompilationTask.java	Thu Apr 10 09:14:35 2014 +0200
@@ -59,8 +59,6 @@
 
 public class CompilationTask implements Runnable, Comparable {
 
-    private static final long TIMESTAMP_START = System.currentTimeMillis();
-
     // Keep static finals in a group with withinEnqueue as the last one. CompilationTask can be
     // called from within its own clinit so it needs to be careful about accessing state. Once
     // withinEnqueue is non-null we assume that CompilationTask is fully initialized.
@@ -372,18 +370,18 @@
         }
         HotSpotVMConfig config = backend.getRuntime().getConfig();
         int compLevel = config.compilationLevelFullOptimization;
-        char compLevelChar;
+        String compLevelString;
         if (config.tieredCompilation) {
-            compLevelChar = '-';
+            compLevelString = "- ";
             if (compLevel != -1) {
-                compLevelChar = (char) ('0' + compLevel);
+                compLevelString = (char) ('0' + compLevel) + " ";
             }
         } else {
-            compLevelChar = ' ';
+            compLevelString = "";
         }
         boolean hasExceptionHandlers = method.getExceptionHandlers().length > 0;
-        TTY.println(String.format("%s%7d %4d %c%c%c%c%c%c %s %s(%d bytes)", compilerName, (System.currentTimeMillis() - TIMESTAMP_START), id, isOSR ? '%' : ' ',
-                        Modifier.isSynchronized(mod) ? 's' : ' ', hasExceptionHandlers ? '!' : ' ', blocking ? 'b' : ' ', Modifier.isNative(mod) ? 'n' : ' ', compLevelChar,
+        TTY.println(String.format("%s%7d %4d %c%c%c%c%c %s %s %s(%d bytes)", compilerName, backend.getRuntime().compilerToVm.getTimeStamp(), id, isOSR ? '%' : ' ',
+                        Modifier.isSynchronized(mod) ? 's' : ' ', hasExceptionHandlers ? '!' : ' ', blocking ? 'b' : ' ', Modifier.isNative(mod) ? 'n' : ' ', compLevelString,
                         MetaUtil.format("%H::%n(%p)", method), isOSR ? "@ " + entryBCI + " " : "", method.getCodeSize()));
     }
diff -r 04f1723150b4 -r d4a9d6f6e57d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVM.java
--- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVM.java	Thu Apr 10 00:05:24 2014 +0200
+++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVM.java	Thu Apr 10 09:14:35 2014 +0200
@@ -328,4 +328,12 @@
      * @return true if the {@code metaspaceMethod} has code for {@code level}
      */
     boolean hasCompiledCodeForOSR(long metaspaceMethod, int entryBCI, int level);
+
+    /**
+     * Fetches the time stamp used for printing inside HotSpot. It is relative to VM start so that
+     * all events can be ordered.
+     *
+     * @return milliseconds since VM start
+     */
+    long getTimeStamp();
 }
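The timestamp change above is worth spelling out: CompilationTask previously measured its log timestamps from its own class-initialization time (TIMESTAMP_START), while HotSpot's native logging measures from VM start (tty->time_stamp()), so the two streams could not be interleaved reliably. Routing through the new CompilerToVM.getTimeStamp() puts both on the VM's clock. The following minimal Java sketch of the idea is not part of the patch; VmClock and formatLine are illustrative stand-ins for the bridge and for CompilationTask's print method.

    final class LogClockSketch {
        // Hypothetical stand-in for the CompilerToVM bridge; the real getTimeStamp()
        // returns milliseconds since VM start (tty->time_stamp() on the C++ side).
        interface VmClock {
            long getTimeStamp();
        }

        static String formatLine(VmClock vm, int id, String method, int bytes) {
            // One clock for both Java- and VM-side log lines keeps events ordered.
            return String.format("%7d %4d  %s (%d bytes)", vm.getTimeStamp(), id, method, bytes);
        }

        public static void main(String[] args) {
            final long vmStart = System.currentTimeMillis(); // pretend the VM started now
            VmClock vm = () -> System.currentTimeMillis() - vmStart;
            System.out.println(formatLine(vm, 42, "java.lang.String::hashCode()", 55));
        }
    }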
diff -r 04f1723150b4 -r d4a9d6f6e57d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVMImpl.java
--- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVMImpl.java	Thu Apr 10 00:05:24 2014 +0200
+++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVMImpl.java	Thu Apr 10 09:14:35 2014 +0200
@@ -182,4 +182,5 @@
 
     public native boolean hasCompiledCodeForOSR(long metaspaceMethod, int entryBCI, int level);
 
+    public native long getTimeStamp();
 }
diff -r 04f1723150b4 -r d4a9d6f6e57d graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/extended/UnsafeAccessNode.java
--- a/graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/extended/UnsafeAccessNode.java	Thu Apr 10 00:05:24 2014 +0200
+++ b/graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/extended/UnsafeAccessNode.java	Thu Apr 10 09:14:35 2014 +0200
@@ -78,13 +78,15 @@
                 }
             }
         }
-        ResolvedJavaType receiverType = ObjectStamp.typeOrNull(object());
-        if (receiverType != null && receiverType.isArray()) {
-            LocationIdentity identity = NamedLocationIdentity.getArrayLocation(receiverType.getComponentType().getKind());
-            // Try to build a better location node
-            ValueNode location = offset();
-            return cloneAsArrayAccess(location, identity);
-        }
+        // Temporarily disable this as it appears to break Truffle.
+        // ResolvedJavaType receiverType = ObjectStamp.typeOrNull(object());
+        // if (receiverType != null && receiverType.isArray()) {
+        //     LocationIdentity identity =
+        //                     NamedLocationIdentity.getArrayLocation(receiverType.getComponentType().getKind());
+        //     // Try to build a better location node
+        //     ValueNode location = offset();
+        //     return cloneAsArrayAccess(location, identity);
+        // }
         return this;
     }
diff -r 04f1723150b4 -r d4a9d6f6e57d src/cpu/sparc/vm/compiledIC_sparc.cpp
--- a/src/cpu/sparc/vm/compiledIC_sparc.cpp	Thu Apr 10 00:05:24 2014 +0200
+++ b/src/cpu/sparc/vm/compiledIC_sparc.cpp	Thu Apr 10 09:14:35 2014 +0200
@@ -81,25 +81,26 @@
 // ----------------------------------------------------------------------------
 
 #define __ _masm.
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
-#ifdef COMPILER2
+void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
   // Stub is fixed up when the corresponding call is converted from calling
   // compiled code to calling interpreted code.
   // set (empty), G5
   // jmp -1
 
-  address mark = cbuf.insts_mark();  // Get mark within main instrs section.
+  if (mark == NULL) {
+    mark = cbuf.insts_mark();  // Get mark within main instrs section.
+  }
 
   MacroAssembler _masm(&cbuf);
 
   address base =
-  __ start_a_stub(to_interp_stub_size()*2);
-  if (base == NULL) return;  // CodeBuffer::expand failed.
+  __ start_a_stub(to_interp_stub_size());
+  guarantee(base != NULL, "out of space");
 
   // Static stub relocation stores the instruction address of the call.
   __ relocate(static_stub_Relocation::spec(mark));
 
-  __ set_metadata(NULL, as_Register(Matcher::inline_cache_reg_encode()));
+  __ set_metadata(NULL, G5);
 
   __ set_inst_mark();
   AddressLiteral addrlit(-1);
@@ -107,11 +108,10 @@
   __ delayed()->nop();
 
+  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size");
+
   // Update current stubs pointer and restore code_end.
   __ end_a_stub();
-#else
-  ShouldNotReachHere();
-#endif
 }
 #undef __
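The new mark parameter exists because Graal installs code in two phases: call sites are recorded while the instructions are relocated (pd_relocate_JavaMethod), and the matching to-interp stubs are emitted afterwards, with each call's address passed in explicitly instead of being read from cbuf.insts_mark(). A rough Java sketch of that record-then-emit pattern follows; every name in it is illustrative, not part of the patch.

    import java.util.ArrayList;
    import java.util.List;

    final class LateStubSketch {
        enum CallType { INVOKESTATIC, INVOKESPECIAL, INVOKEVIRTUAL }

        static final class CallSite {
            final int pcOffset;
            final CallType type;
            CallSite(int pcOffset, CallType type) { this.pcOffset = pcOffset; this.type = type; }
        }

        public static void main(String[] args) {
            List<CallSite> sites = new ArrayList<>();
            // Phase 1: code emission records each call site's offset and kind.
            sites.add(new CallSite(0x40, CallType.INVOKESTATIC));
            sites.add(new CallSite(0x7c, CallType.INVOKEVIRTUAL));
            sites.add(new CallSite(0xa8, CallType.INVOKESPECIAL));
            long instructionsStart = 0x7f0000L; // assumed base address of the installed code
            // Phase 2: only direct (static/special) calls get a compiled-to-interpreted stub,
            // keyed by the call's address so the stub's relocation can refer back to it.
            for (CallSite site : sites) {
                if (site.type != CallType.INVOKEVIRTUAL) {
                    System.out.printf("emit_to_interp_stub(buffer, 0x%x)%n", instructionsStart + site.pcOffset);
                }
            }
        }
    }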
diff -r 04f1723150b4 -r d4a9d6f6e57d src/cpu/x86/vm/compiledIC_x86.cpp
--- a/src/cpu/x86/vm/compiledIC_x86.cpp	Thu Apr 10 00:05:24 2014 +0200
+++ b/src/cpu/x86/vm/compiledIC_x86.cpp	Thu Apr 10 09:14:35 2014 +0200
@@ -78,21 +78,24 @@
 // ----------------------------------------------------------------------------
 
 #define __ _masm.
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
+void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
   // Stub is fixed up when the corresponding call is converted from
   // calling compiled code to calling interpreted code.
   // movq rbx, 0
   // jmp -5 # to self
 
-  address mark = cbuf.insts_mark();  // Get mark within main instrs section.
+  if (mark == NULL) {
+    mark = cbuf.insts_mark();  // Get mark within main instrs section.
+  }
 
   // Note that the code buffer's insts_mark is always relative to insts.
   // That's why we must use the macroassembler to generate a stub.
   MacroAssembler _masm(&cbuf);
 
   address base =
-  __ start_a_stub(to_interp_stub_size()*2);
-  if (base == NULL) return;  // CodeBuffer::expand failed.
+  __ start_a_stub(to_interp_stub_size());
+  guarantee(base != NULL, "out of space");
+
   // Static stub relocation stores the instruction address of the call.
   __ relocate(static_stub_Relocation::spec(mark), Assembler::imm_operand);
   // Static stub relocation also tags the Method* in the code-stream.
@@ -100,6 +103,8 @@
   // This is recognized as unresolved by relocs/nativeinst/ic code.
   __ jump(RuntimeAddress(__ pc()));
 
+  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size");
+
   // Update current stubs pointer and restore insts_end.
   __ end_a_stub();
 }
@@ -117,12 +122,6 @@
 
 void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
   address stub = find_stub();
-#ifdef GRAAL
-  if (stub == NULL) {
-    set_destination_mt_safe(entry);
-    return;
-  }
-#endif
   guarantee(stub != NULL, "stub not found");
 
   if (TraceICs) {
@@ -172,14 +171,12 @@
   verify_alignment();
 }
 
-#ifndef GRAAL
   // Verify stub.
   address stub = find_stub();
   assert(stub != NULL, "no stub found for static call");
   // Creation also verifies the object.
   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
   NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
-#endif
 
   // Verify state.
   assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
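On both ports the stub is now sized exactly (to_interp_stub_size() rather than twice that), allocation failure becomes a hard guarantee instead of a silent return, and a trailing assert checks that the emitted stub stayed within the estimate. Here is a small Java sketch of that size-exactly/fail-loudly/verify-after discipline; it is not HotSpot code, and a plain ByteBuffer stands in for the code buffer.

    import java.nio.ByteBuffer;

    final class ExactSizeEmitSketch {
        static final int STUB_SIZE = 16; // assumed worst-case size of one to-interp stub

        static void emitStub(ByteBuffer code) {
            int base = code.position();
            if (code.remaining() < STUB_SIZE) {
                throw new IllegalStateException("out of space"); // mirrors guarantee(base != NULL, ...)
            }
            code.putLong(0L);        // stand-in for the Method* load (set_metadata / movq rbx, 0)
            code.putInt(0xE9000000); // stand-in for the self-referential jump
            // mirrors assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size")
            assert code.position() - base <= STUB_SIZE : "wrong stub size";
        }

        public static void main(String[] args) {
            ByteBuffer code = ByteBuffer.allocate(STUB_SIZE);
            emitStub(code);
            System.out.println("emitted " + code.position() + " of " + STUB_SIZE + " bytes");
        }
    }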
diff -r 04f1723150b4 -r d4a9d6f6e57d src/share/vm/code/compiledIC.cpp
--- a/src/share/vm/code/compiledIC.cpp	Thu Apr 10 00:05:24 2014 +0200
+++ b/src/share/vm/code/compiledIC.cpp	Thu Apr 10 09:14:35 2014 +0200
@@ -76,7 +76,7 @@
   // Don't use ic_destination for this test since that forwards
   // through ICBuffer instead of returning the actual current state of
   // the CompiledIC.
-  if (is_icholder_entry(_ic_call->destination()) GRAAL_ONLY(&& _value != NULL)) {
+  if (is_icholder_entry(_ic_call->destination())) {
     // When patching for the ICStub case the cached value isn't
     // overwritten until the ICStub copied into the CompiledIC during
     // the next safepoint.  Make sure that the CompiledICHolder* is
@@ -107,13 +107,6 @@
     _ic_call->set_destination_mt_safe(entry_point);
   }
 
-#ifdef GRAAL
-  if (_value == NULL) {
-    // Can happen when Graal converted a virtual call into an invoke special based on static analysis.
-    return;
-  }
-#endif
-
   if (is_optimized() || is_icstub) {
     // Optimized call sites don't have a cache value and ICStub call
     // sites only change the entry point.  Changing the value in that
@@ -238,8 +231,8 @@
   // for calling directly to vep without using the inline cache (i.e., cached_value == NULL)
 #ifdef ASSERT
   CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
-  bool is_c1_or_graal_method = caller->is_compiled_by_c1() || caller->is_compiled_by_graal();
-  assert( is_c1_or_graal_method ||
+  bool is_c1_method = caller->is_compiled_by_c1();
+  assert( is_c1_method ||
          !is_monomorphic ||
          is_optimized() ||
          (cached_metadata() != NULL && cached_metadata()->is_klass()), "sanity check");
@@ -264,14 +257,12 @@
     // Check if we are calling into our own codeblob (i.e., to a stub)
     CodeBlob* cb = CodeCache::find_blob(_ic_call->instruction_address());
     address dest = ic_destination();
-#ifndef GRAAL
 #ifdef ASSERT
     {
       CodeBlob* db = CodeCache::find_blob_unsafe(dest);
       assert(!db->is_adapter_blob(), "must use stub!");
     }
 #endif /* ASSERT */
-#endif
     is_call_to_interpreted = cb->contains(dest);
   }
   return is_call_to_interpreted;
diff -r 04f1723150b4 -r d4a9d6f6e57d src/share/vm/code/compiledIC.hpp
--- a/src/share/vm/code/compiledIC.hpp	Thu Apr 10 00:05:24 2014 +0200
+++ b/src/share/vm/code/compiledIC.hpp	Thu Apr 10 09:14:35 2014 +0200
@@ -308,7 +308,7 @@
   friend CompiledStaticCall* compiledStaticCall_at(Relocation* call_site);
 
   // Code
-  static void emit_to_interp_stub(CodeBuffer &cbuf);
+  static void emit_to_interp_stub(CodeBuffer &cbuf, address mark = NULL);
   static int to_interp_stub_size();
   static int reloc_to_interp_stub();
diff -r 04f1723150b4 -r d4a9d6f6e57d src/share/vm/graal/graalCodeInstaller.cpp
--- a/src/share/vm/graal/graalCodeInstaller.cpp	Thu Apr 10 00:05:24 2014 +0200
+++ b/src/share/vm/graal/graalCodeInstaller.cpp	Thu Apr 10 09:14:35 2014 +0200
@@ -22,6 +22,7 @@
  */
 
 #include "precompiled.hpp"
+#include "code/compiledIC.hpp"
 #include "compiler/compileBroker.hpp"
 #include "compiler/disassembler.hpp"
 #include "runtime/javaCalls.hpp"
@@ -474,12 +475,35 @@
   _next_call_type = INVOKE_INVALID;
 }
 
+int CodeInstaller::estimate_stub_entries() {
+  // Estimate the number of static call stubs that might be emitted.
+  int static_call_stubs = 0;
+  for (int i = 0; i < _sites->length(); i++) {
+    oop site = ((objArrayOop) (_sites))->obj_at(i);
+    if (site->is_a(CompilationResult_Mark::klass())) {
+      oop id_obj = CompilationResult_Mark::id(site);
+      if (id_obj != NULL) {
+        assert(java_lang_boxing_object::is_instance(id_obj, T_INT), "Integer id expected");
+        jint id = id_obj->int_field(java_lang_boxing_object::value_offset_in_bytes(T_INT));
+        if (id == INVOKESTATIC || id == INVOKESPECIAL) {
+          static_call_stubs++;
+        }
+      }
+    }
+  }
+  return static_call_stubs;
+}
+
 // perform data and call relocation on the CodeBuffer
 bool CodeInstaller::initialize_buffer(CodeBuffer& buffer) {
   int locs_buffer_size = _sites->length() * (relocInfo::length_limit + sizeof(relocInfo));
   char* locs_buffer = NEW_RESOURCE_ARRAY(char, locs_buffer_size);
   buffer.insts()->initialize_shared_locs((relocInfo*)locs_buffer, locs_buffer_size / sizeof(relocInfo));
-  buffer.initialize_stubs_size(256);
+  // Allocate enough space in the stub section for the static call
+  // stubs.  Stubs have extra relocs but they are managed by the stub
+  // section itself so they don't need to be accounted for in the
+  // locs_buffer above.
+ buffer.initialize_stubs_size(estimate_stub_entries() * CompiledStaticCall::to_interp_stub_size()); buffer.initialize_consts_size(_constants_size); _debug_recorder = new DebugInformationRecorder(_oop_recorder); @@ -778,6 +802,10 @@ TRACE_graal_3("method call"); CodeInstaller::pd_relocate_JavaMethod(hotspot_method, pc_offset); + if (_next_call_type == INVOKESTATIC || _next_call_type == INVOKESPECIAL) { + // Need a static call stub for transitions from compiled to interpreted. + CompiledStaticCall::emit_to_interp_stub(buffer, _instructions->start() + pc_offset); + } } _next_call_type = INVOKE_INVALID; diff -r 04f1723150b4 -r d4a9d6f6e57d src/share/vm/graal/graalCodeInstaller.hpp --- a/src/share/vm/graal/graalCodeInstaller.hpp Thu Apr 10 00:05:24 2014 +0200 +++ b/src/share/vm/graal/graalCodeInstaller.hpp Thu Apr 10 09:14:35 2014 +0200 @@ -102,7 +102,9 @@ // extract the fields of the CompilationResult void initialize_fields(oop target_method); void initialize_assumptions(oop target_method); - + + int estimate_stub_entries(); + // perform data and call relocation on the CodeBuffer bool initialize_buffer(CodeBuffer& buffer); @@ -120,7 +122,7 @@ void record_scope(jint pc_offset, oop code_pos, GrowableArray* objects); void process_exception_handlers(); - + int estimateStubSpace(int static_call_stubs); }; /** diff -r 04f1723150b4 -r d4a9d6f6e57d src/share/vm/graal/graalCompilerToVM.cpp --- a/src/share/vm/graal/graalCompilerToVM.cpp Thu Apr 10 00:05:24 2014 +0200 +++ b/src/share/vm/graal/graalCompilerToVM.cpp Thu Apr 10 09:14:35 2014 +0200 @@ -782,6 +782,12 @@ return method->lookup_osr_nmethod_for(entry_bci, comp_level, true) != NULL; C2V_END +C2V_VMENTRY(jlong, getTimeStamp, (JNIEnv *env, jobject)) + // tty->time_stamp is the time since VM start which should be used + // for all HotSpot log output when a timestamp is required. + return tty->time_stamp().milliseconds(); +C2V_END + #define CC (char*) /*cast a literal from (const char*)*/ #define FN_PTR(f) CAST_FROM_FN_PTR(void*, &(c2v_ ## f)) @@ -855,6 +861,7 @@ {CC"allocateCompileId", CC"("METASPACE_METHOD"I)I", FN_PTR(allocateCompileId)}, {CC"isMature", CC"("METASPACE_METHOD_DATA")Z", FN_PTR(isMature)}, {CC"hasCompiledCodeForOSR", CC"("METASPACE_METHOD"II)Z", FN_PTR(hasCompiledCodeForOSR)}, + {CC"getTimeStamp", CC"()J", FN_PTR(getTimeStamp)}, }; int CompilerToVM_methods_count() {
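For reference, the stub-section sizing added in graalCodeInstaller.cpp can be read as: count the INVOKESTATIC/INVOKESPECIAL marks in the compilation result, then reserve count * to_interp_stub_size() bytes instead of the old fixed 256. Below is a hedged Java mirror of CodeInstaller::estimate_stub_entries(); the mark-id constants and the per-stub size are assumed placeholders, not HotSpot's real values.

    import java.util.Arrays;
    import java.util.List;

    final class StubSpaceSketch {
        static final int INVOKESTATIC = 6;  // assumed mark ids; the real values are the
        static final int INVOKESPECIAL = 7; // MarkId constants recorded by the compiler

        static int estimateStubEntries(List<Integer> markIds) {
            // Count only the marks that identify direct (static/special) call sites,
            // since only those need a compiled-to-interpreted stub.
            int staticCallStubs = 0;
            for (Integer id : markIds) {
                if (id != null && (id == INVOKESTATIC || id == INVOKESPECIAL)) {
                    staticCallStubs++;
                }
            }
            return staticCallStubs;
        }

        public static void main(String[] args) {
            List<Integer> markIds = Arrays.asList(INVOKESTATIC, null, INVOKESPECIAL, 99);
            int toInterpStubSize = 15; // assumed per-stub byte size (platform-dependent)
            System.out.println("stub section bytes: " + estimateStubEntries(markIds) * toInterpStubSize);
        }
    }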