# HG changeset patch
# User Gilles Duboscq
# Date 1381422382 -7200
# Node ID ccb4f2af2319b3bb19a19b79522af445c6d42782
# Parent 58fc8e2b7b6da00d2c77effd3fb3bf36bea09e69
Adapt compressed pointers implementation for last HotSpot changes

diff -r 58fc8e2b7b6d -r ccb4f2af2319 graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotBackend.java
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotBackend.java	Thu Oct 10 17:22:25 2013 +0200
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotBackend.java	Thu Oct 10 18:26:22 2013 +0200
@@ -251,7 +251,7 @@
             AMD64HotSpotRuntime hr = ((AMD64HotSpotRuntime) gen.getCodeCache());
             if (hr.useCompressedKlassPointers()) {
                 Register register = r10;
-                AMD64HotSpotMove.decodeKlassPointer(asm, register, hr.heapBaseRegister(), src, config.narrowKlassBase, config.narrowKlassShift, config.logKlassAlignment);
+                AMD64HotSpotMove.decodeKlassPointer(asm, register, hr.heapBaseRegister(), src, config.narrowKlassBase, config.narrowOopBase, config.narrowKlassShift, config.logKlassAlignment);
                 asm.cmpq(inlineCacheKlass, register);
             } else {
                 asm.cmpq(inlineCacheKlass, src);
diff -r 58fc8e2b7b6d -r ccb4f2af2319 graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotLIRGenerator.java
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotLIRGenerator.java	Thu Oct 10 17:22:25 2013 +0200
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotLIRGenerator.java	Thu Oct 10 18:26:22 2013 +0200
@@ -444,11 +444,11 @@
          */
         if (isCompressCandidate(access)) {
             if (runtime().useCompressedOops() && kind == Kind.Object) {
-                append(new LoadCompressedPointer(kind, result, runtime().heapBaseRegister().asValue(), loadAddress, access != null ? state(access) : null, getNarrowOopBase(), getNarrowOopShift(),
-                                getLogMinObjectAlignment()));
+                append(new LoadCompressedPointer(kind, result, runtime().heapBaseRegister().asValue(), loadAddress, access != null ? state(access) : null, getNarrowKlassBase(), getNarrowOopBase(),
+                                getNarrowOopShift(), getLogMinObjectAlignment()));
             } else if (runtime().useCompressedKlassPointers() && kind == Kind.Long) {
-                append(new LoadCompressedPointer(kind, result, runtime().heapBaseRegister().asValue(), loadAddress, access != null ? state(access) : null, getNarrowKlassBase(), getNarrowKlassShift(),
-                                getLogKlassAlignment()));
+                append(new LoadCompressedPointer(kind, result, runtime().heapBaseRegister().asValue(), loadAddress, access != null ? state(access) : null, getNarrowKlassBase(), getNarrowOopBase(),
+                                getNarrowKlassShift(), getLogKlassAlignment()));
             } else {
                 append(new LoadOp(kind, result, loadAddress, access != null ? state(access) : null));
             }
@@ -480,14 +480,14 @@
         if (runtime().useCompressedOops() && kind == Kind.Object) {
             if (input.getKind() == Kind.Object) {
                 Variable scratch = newVariable(Kind.Long);
-                append(new StoreCompressedPointer(kind, storeAddress, input, scratch, state, getNarrowOopBase(), getNarrowOopShift(), getLogMinObjectAlignment()));
+                append(new StoreCompressedPointer(kind, storeAddress, input, scratch, state, getNarrowKlassBase(), getNarrowOopBase(), getNarrowOopShift(), getLogMinObjectAlignment()));
             } else {
                 // the input oop is already compressed
                 append(new StoreOp(input.getKind(), storeAddress, input, state));
             }
         } else if (runtime().useCompressedKlassPointers() && kind == Kind.Long) {
             Variable scratch = newVariable(Kind.Long);
-            append(new StoreCompressedPointer(kind, storeAddress, input, scratch, state, getNarrowKlassBase(), getNarrowKlassShift(), getLogKlassAlignment()));
+            append(new StoreCompressedPointer(kind, storeAddress, input, scratch, state, getNarrowKlassBase(), getNarrowOopBase(), getNarrowKlassShift(), getLogKlassAlignment()));
         } else {
             append(new StoreOp(kind, storeAddress, input, state));
         }
diff -r 58fc8e2b7b6d -r ccb4f2af2319 graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotMove.java
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotMove.java	Thu Oct 10 17:22:25 2013 +0200
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotMove.java	Thu Oct 10 18:26:22 2013 +0200
@@ -69,14 +69,17 @@

     public static class LoadCompressedPointer extends LoadOp {

-        private long base;
-        private int shift;
-        private int alignment;
+        private final long klassBase;
+        private final long heapBase;
+        private final int shift;
+        private final int alignment;
         @Alive({REG}) protected AllocatableValue heapBaseRegister;

-        public LoadCompressedPointer(Kind kind, AllocatableValue result, AllocatableValue heapBaseRegister, AMD64AddressValue address, LIRFrameState state, long base, int shift, int alignment) {
+        public LoadCompressedPointer(Kind kind, AllocatableValue result, AllocatableValue heapBaseRegister, AMD64AddressValue address, LIRFrameState state, long klassBase, long heapBase, int shift,
+                        int alignment) {
             super(kind, result, address, state);
-            this.base = base;
+            this.klassBase = klassBase;
+            this.heapBase = heapBase;
             this.shift = shift;
             this.alignment = alignment;
             this.heapBaseRegister = heapBaseRegister;
@@ -88,9 +91,9 @@
             Register resRegister = asRegister(result);
             masm.movl(resRegister, address.toAddress());
             if (kind == Kind.Object) {
-                decodePointer(masm, resRegister, asRegister(heapBaseRegister), base, shift, alignment);
+                decodePointer(masm, resRegister, asRegister(heapBaseRegister), heapBase, shift, alignment);
             } else {
-                decodeKlassPointer(masm, resRegister, asRegister(heapBaseRegister), base, shift, alignment);
+                decodeKlassPointer(masm, resRegister, asRegister(heapBaseRegister), klassBase, heapBase, shift, alignment);
             }
         }
     }
@@ -98,16 +101,19 @@
     public static class StoreCompressedPointer extends AMD64LIRInstruction {

         protected final Kind kind;
-        private long base;
-        private int shift;
-        private int alignment;
+        private final long klassBase;
+        private final long heapBase;
+        private final int shift;
+        private final int alignment;
         @Temp({REG}) private AllocatableValue scratch;
         @Alive({REG}) protected AllocatableValue input;
         @Alive({COMPOSITE}) protected AMD64AddressValue address;
         @State protected LIRFrameState state;

-        public StoreCompressedPointer(Kind kind, AMD64AddressValue address, AllocatableValue input, AllocatableValue scratch, LIRFrameState state, long base, int shift, int alignment) {
-            this.base = base;
+        public StoreCompressedPointer(Kind kind, AMD64AddressValue address, AllocatableValue input, AllocatableValue scratch, LIRFrameState state, long klassBase, long heapBase, int shift,
+                        int alignment) {
+            this.klassBase = klassBase;
+            this.heapBase = heapBase;
             this.shift = shift;
             this.alignment = alignment;
             this.scratch = scratch;
@@ -120,12 +126,12 @@

         @Override
         public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
-            Register heapBase = ((HotSpotRuntime) tasm.codeCache).heapBaseRegister();
+            Register heapBaseReg = ((HotSpotRuntime) tasm.codeCache).heapBaseRegister();
             masm.movq(asRegister(scratch), asRegister(input));
             if (kind == Kind.Object) {
-                encodePointer(masm, asRegister(scratch), heapBase, base, shift, alignment);
+                encodePointer(masm, asRegister(scratch), heapBaseReg, heapBase, shift, alignment);
             } else {
-                encodeKlassPointer(masm, asRegister(scratch), heapBase, base, shift, alignment);
+                encodeKlassPointer(masm, asRegister(scratch), heapBaseReg, klassBase, heapBase, shift, alignment);
             }
             if (state != null) {
                 tasm.recordImplicitException(masm.codeBuffer.position(), state);
@@ -220,9 +226,11 @@
         }
     }

-    private static void encodeKlassPointer(AMD64MacroAssembler masm, Register scratchRegister, Register heapBaseRegister, long base, int shift, int alignment) {
-        if (base != 0) {
+    private static void encodeKlassPointer(AMD64MacroAssembler masm, Register scratchRegister, Register heapBaseRegister, long klassBase, long heapBase, int shift, int alignment) {
+        if (klassBase != 0) {
+            masm.movq(heapBaseRegister, klassBase);
             masm.subq(scratchRegister, heapBaseRegister);
+            restoreHeapBase(masm, heapBaseRegister, heapBase);
         }
         if (shift != 0) {
             assert alignment == shift : "Encode algorithm is wrong";
@@ -230,21 +238,29 @@
         }
     }

-    private static void decodeKlassPointer(AMD64MacroAssembler masm, Register resRegister, Register heapBaseRegister, long base, int shift, int alignment) {
+    private static void decodeKlassPointer(AMD64MacroAssembler masm, Register resRegister, Register heapBaseRegister, long klassBase, long heapBase, int shift, int alignment) {
         if (shift != 0) {
             assert alignment == shift : "Decode algorithm is wrong";
             masm.shlq(resRegister, alignment);
-            if (base != 0) {
-                masm.addq(resRegister, heapBaseRegister);
-            }
-        } else {
-            assert base == 0 : "Sanity";
+        }
+        if (klassBase != 0) {
+            masm.movq(heapBaseRegister, klassBase);
+            masm.addq(resRegister, heapBaseRegister);
+            restoreHeapBase(masm, heapBaseRegister, heapBase);
         }
     }

-    public static void decodeKlassPointer(AMD64MacroAssembler masm, Register register, Register heapBaseRegister, AMD64Address address, long narrowKlassBase, int narrowKlassShift,
+    private static void restoreHeapBase(AMD64MacroAssembler masm, Register heapBaseRegister, long heapBase) {
+        if (heapBase == 0) {
+            masm.xorq(heapBaseRegister, heapBaseRegister);
+        } else {
+            masm.movq(heapBaseRegister, heapBase);
+        }
+    }
+
+    public static void decodeKlassPointer(AMD64MacroAssembler masm, Register register, Register heapBaseRegister, AMD64Address address, long narrowKlassBase, long narrowOopBase, int narrowKlassShift,
                     int logKlassAlignment) {
         masm.movl(register, address);
-        decodeKlassPointer(masm, register, heapBaseRegister, narrowKlassBase, narrowKlassShift, logKlassAlignment);
+        decodeKlassPointer(masm, register, heapBaseRegister, narrowKlassBase, narrowOopBase, narrowKlassShift, logKlassAlignment);
    }
}
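
For reference, the encode/decode sequences emitted by the patched encodeKlassPointer and decodeKlassPointer reduce, at the value level, to subtract-then-shift and shift-then-add against the narrow klass base, with the heap base register temporarily borrowed to hold that base and afterwards restored by restoreHeapBase. The plain-Java sketch below illustrates only that arithmetic; the class and its members (CompressedKlassSketch, encode, decode) are hypothetical and are not part of the changeset, and the register save/restore has no equivalent at this level.

// Hypothetical value-level sketch of the compressed klass pointer scheme the
// patched assembler sequences implement. klassBase and shift stand in for
// config.narrowKlassBase and config.narrowKlassShift / logKlassAlignment.
final class CompressedKlassSketch {
    private final long klassBase; // assumed: config.narrowKlassBase
    private final int shift;      // assumed: config.narrowKlassShift (== logKlassAlignment)

    CompressedKlassSketch(long klassBase, int shift) {
        this.klassBase = klassBase;
        this.shift = shift;
    }

    // Mirrors encodeKlassPointer: subtract klassBase, then shift right by 'shift'.
    int encode(long klassPointer) {
        return (int) ((klassPointer - klassBase) >>> shift);
    }

    // Mirrors decodeKlassPointer: shift left by 'shift', then add klassBase.
    long decode(int narrowKlass) {
        return ((narrowKlass & 0xFFFFFFFFL) << shift) + klassBase;
    }
}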