changeset 23370:39f9960bb6ae

Ensure the tail call machinery for Truffle follows HotSpot rules for first instruction size
author Tom Rodriguez <tom.rodriguez@oracle.com>
date Wed, 03 Feb 2016 13:14:14 -0800
parents 533b3e243531
children 882be1c92a85
files graal/com.oracle.graal.asm.amd64/src/com/oracle/graal/asm/amd64/AMD64Assembler.java graal/com.oracle.graal.truffle.hotspot.amd64/src/com/oracle/graal/truffle/hotspot/amd64/AMD64OptimizedCallTargetInstrumentationFactory.java
diffstat 2 files changed, 33 insertions(+), 9 deletions(-)
--- a/graal/com.oracle.graal.asm.amd64/src/com/oracle/graal/asm/amd64/AMD64Assembler.java	Wed Feb 03 19:20:07 2016 +0100
+++ b/graal/com.oracle.graal.asm.amd64/src/com/oracle/graal/asm/amd64/AMD64Assembler.java	Wed Feb 03 13:14:14 2016 -0800
@@ -386,19 +386,32 @@
         emitModRM(reg.encoding & 0x07, rm);
     }
 
+    protected void emitOperandHelper(Register reg, AMD64Address addr) {
+        assert !reg.equals(Register.None);
+        emitOperandHelper(encode(reg), addr, false);
+    }
+
     /**
      * Emits the ModR/M byte and optionally the SIB byte for one register and one memory operand.
+     *
+     * @param force4Byte use 4 byte encoding for displacements that would normally fit in a byte
      */
-    protected void emitOperandHelper(Register reg, AMD64Address addr) {
+    protected void emitOperandHelper(Register reg, AMD64Address addr, boolean force4Byte) {
         assert !reg.equals(Register.None);
-        emitOperandHelper(encode(reg), addr);
+        emitOperandHelper(encode(reg), addr, force4Byte);
+    }
+
+    protected void emitOperandHelper(int reg, AMD64Address addr) {
+        emitOperandHelper(reg, addr, false);
     }
 
     /**
      * Emits the ModR/M byte and optionally the SIB byte for one memory operand and an opcode
      * extension in the R field.
+     *
+     * @param force4Byte use 4 byte encoding for displacements that would normally fit in a byte
      */
-    protected void emitOperandHelper(int reg, AMD64Address addr) {
+    protected void emitOperandHelper(int reg, AMD64Address addr, boolean force4Byte) {
         assert (reg & 0x07) == reg;
         int regenc = reg << 3;
 
@@ -429,7 +442,7 @@
                     assert !index.equals(rsp) : "illegal addressing mode";
                     emitByte(0x04 | regenc);
                     emitByte(scale.log2 << 6 | indexenc | baseenc);
-                } else if (isByte(disp)) {
+                } else if (isByte(disp) && !force4Byte) {
                     // [base + indexscale + imm8]
                     // [01 reg 100][ss index base] imm8
                     assert !index.equals(rsp) : "illegal addressing mode";
@@ -451,7 +464,7 @@
                     // [00 reg 100][00 100 100]
                     emitByte(0x04 | regenc);
                     emitByte(0x24);
-                } else if (isByte(disp)) {
+                } else if (isByte(disp) && !force4Byte) {
                     // [rsp + imm8]
                     // [01 reg 100][00 100 100] disp8
                     emitByte(0x44 | regenc);
@@ -471,7 +484,7 @@
                     // [base]
                     // [00 reg base]
                     emitByte(0x00 | regenc | baseenc);
-                } else if (isByte(disp)) {
+                } else if (isByte(disp) && !force4Byte) {
                     // [base + disp8]
                     // [01 reg base] disp8
                     emitByte(0x40 | regenc | baseenc);
@@ -1334,16 +1347,20 @@
     }
 
     public final void movq(Register dst, AMD64Address src) {
+        movq(dst, src, false);
+    }
+
+    public final void movq(Register dst, AMD64Address src, boolean wide) {
         if (dst.getRegisterCategory().equals(AMD64.XMM)) {
             emitByte(0xF3);
             prefixq(src, dst);
             emitByte(0x0F);
             emitByte(0x7E);
-            emitOperandHelper(dst, src);
+            emitOperandHelper(dst, src, wide);
         } else {
             prefixq(src, dst);
             emitByte(0x8B);
-            emitOperandHelper(dst, src);
+            emitOperandHelper(dst, src, wide);
         }
     }
 
--- a/graal/com.oracle.graal.truffle.hotspot.amd64/src/com/oracle/graal/truffle/hotspot/amd64/AMD64OptimizedCallTargetInstrumentationFactory.java	Wed Feb 03 19:20:07 2016 +0100
+++ b/graal/com.oracle.graal.truffle.hotspot.amd64/src/com/oracle/graal/truffle/hotspot/amd64/AMD64OptimizedCallTargetInstrumentationFactory.java	Wed Feb 03 13:14:14 2016 -0800
@@ -36,6 +36,7 @@
 import com.oracle.graal.asm.amd64.AMD64MacroAssembler;
 import com.oracle.graal.code.CompilationResult;
 import com.oracle.graal.compiler.common.spi.ForeignCallsProvider;
+import com.oracle.graal.hotspot.amd64.AMD64HotSpotBackend;
 import com.oracle.graal.lir.asm.CompilationResultBuilder;
 import com.oracle.graal.lir.asm.DataBuilder;
 import com.oracle.graal.lir.asm.FrameContext;
@@ -57,9 +58,15 @@
                 Register thisRegister = codeCache.getRegisterConfig().getCallingConventionRegisters(JavaCall, JavaKind.Object)[0];
                 Register spillRegister = AMD64.r10; // TODO(mg): fix me
                 Label doProlog = new Label();
+                int pos = asm.position();
 
                 AMD64Address codeBlobAddress = new AMD64Address(thisRegister, getFieldOffset("entryPoint", InstalledCode.class));
-                asm.movq(spillRegister, codeBlobAddress);
+                /*
+                 * The first instruction must be at least 5 bytes long to be safe for not entrant
+                 * patching, so force a wider encoding of the movq instruction.
+                 */
+                asm.movq(spillRegister, codeBlobAddress, true);
+                assert asm.position() - pos >= AMD64HotSpotBackend.PATCHED_VERIFIED_ENTRY_POINT_INSTRUCTION_SIZE;
                 asm.testq(spillRegister, spillRegister);
                 asm.jcc(ConditionFlag.Equal, doProlog);
                 asm.jmp(spillRegister);
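
The assertion added above checks the HotSpot invariant that motivates the change: when a compiled method is made not entrant, HotSpot patches the start of its verified entry point with a 5-byte jmp rel32, so the first emitted instruction must be at least that long or the patch would clobber part of the next instruction. A minimal standalone sketch of that check (plain Java, not Graal code; the constant 5 stands in for AMD64HotSpotBackend.PATCHED_VERIFIED_ENTRY_POINT_INSTRUCTION_SIZE, and the byte counts are the illustrative ones from the encoding sketch above):

public class EntryPatchSketch {
    // Illustrative stand-in for AMD64HotSpotBackend.PATCHED_VERIFIED_ENTRY_POINT_INSTRUCTION_SIZE.
    static final int PATCHED_ENTRY_SIZE = 5; // size of "E9 rel32" (jmp) on x86-64

    /** Mirrors the new assert: the first instruction must span the patched region. */
    static void checkFirstInstruction(int startPos, int endPos) {
        int length = endPos - startPos;
        if (length < PATCHED_ENTRY_SIZE) {
            throw new AssertionError("first instruction is only " + length
                            + " bytes; patching the entry would clobber the next instruction");
        }
    }

    public static void main(String[] args) {
        checkFirstInstruction(0, 7); // wide movq (disp32 form), 7 bytes: passes
        try {
            checkFirstInstruction(0, 4); // narrow movq (disp8 form), 4 bytes: rejected
        } catch (AssertionError expected) {
            System.out.println(expected.getMessage());
        }
    }
}

With the disp8 form the entry movq would be only 4 bytes and the invariant would fail; forcing the disp32 form makes it 7 bytes, so patching the verified entry point stays safe.
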