/*
 * Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package com.oracle.graal.hotspot.amd64;

import static com.oracle.graal.compiler.common.GraalOptions.*;
import static jdk.internal.jvmci.amd64.AMD64.*;
import static jdk.internal.jvmci.code.CallingConvention.Type.*;
import static jdk.internal.jvmci.code.ValueUtil.*;
import static jdk.internal.jvmci.common.UnsafeAccess.*;

import java.util.*;

import jdk.internal.jvmci.amd64.*;
import jdk.internal.jvmci.code.*;
import jdk.internal.jvmci.hotspot.*;
import jdk.internal.jvmci.meta.*;

import com.oracle.graal.asm.*;
import com.oracle.graal.asm.amd64.*;
import com.oracle.graal.asm.amd64.AMD64Assembler.*;
import com.oracle.graal.compiler.common.alloc.*;
import com.oracle.graal.compiler.gen.*;
import com.oracle.graal.compiler.target.*;
import com.oracle.graal.hotspot.*;
import com.oracle.graal.hotspot.meta.*;
import com.oracle.graal.hotspot.stubs.*;
import com.oracle.graal.lir.*;
import com.oracle.graal.lir.amd64.*;
import com.oracle.graal.lir.asm.*;
import com.oracle.graal.lir.framemap.*;
import com.oracle.graal.lir.gen.*;
import com.oracle.graal.nodes.*;
import com.oracle.graal.nodes.spi.*;

/**
 * HotSpot AMD64-specific backend.
 */
public class AMD64HotSpotBackend extends HotSpotHostBackend {

    public AMD64HotSpotBackend(HotSpotGraalRuntimeProvider runtime, HotSpotProviders providers) {
        super(runtime, providers);
    }

    @Override
    public FrameMapBuilder newFrameMapBuilder(RegisterConfig registerConfig) {
        RegisterConfig registerConfigNonNull = registerConfig == null ? getCodeCache().getRegisterConfig() : registerConfig;
        return new AMD64FrameMapBuilder(newFrameMap(registerConfigNonNull), getCodeCache(), registerConfigNonNull);
    }

    @Override
    public FrameMap newFrameMap(RegisterConfig registerConfig) {
        return new AMD64FrameMap(getCodeCache(), registerConfig, this);
    }

    @Override
    public LIRGeneratorTool newLIRGenerator(CallingConvention cc, LIRGenerationResult lirGenRes) {
        return new AMD64HotSpotLIRGenerator(getProviders(), getRuntime().getConfig(), cc, lirGenRes);
    }

    @Override
    public LIRGenerationResult newLIRGenerationResult(String compilationUnitName, LIR lir, FrameMapBuilder frameMapBuilder, ResolvedJavaMethod method, Object stub) {
        return new AMD64HotSpotLIRGenerationResult(compilationUnitName, lir, frameMapBuilder, stub);
    }

    @Override
    public NodeLIRBuilderTool newNodeLIRBuilder(StructuredGraph graph, LIRGeneratorTool lirGen) {
        return new AMD64HotSpotNodeLIRBuilder(getRuntime(), graph, lirGen);
    }

    @Override
    public BytecodeLIRBuilder newBytecodeLIRBuilder(LIRGeneratorTool gen, BytecodeParserTool parser) {
        return new AMD64HotSpotBytecodeLIRBuilder(gen, parser);
    }

    /**
     * Emits code to do stack overflow checking.
     *
     * @param afterFrameInit specifies if the stack pointer has already been adjusted to allocate
     *            the current frame
     * @param isVerifiedEntryPoint specifies if the code buffer is currently at the verified entry
     *            point
     */
    protected static void emitStackOverflowCheck(CompilationResultBuilder crb, int pagesToBang, boolean afterFrameInit, boolean isVerifiedEntryPoint) {
        if (pagesToBang > 0) {
            AMD64MacroAssembler asm = (AMD64MacroAssembler) crb.asm;
            int frameSize = crb.frameMap.frameSize();
            if (frameSize > 0) {
                int lastFramePage = frameSize / unsafe.pageSize();
                // emit multiple stack bangs for methods with frames larger than a page
                for (int i = 0; i <= lastFramePage; i++) {
                    int disp = (i + pagesToBang) * unsafe.pageSize();
                    if (afterFrameInit) {
                        disp -= frameSize;
                    }
                    crb.blockComment("[stack overflow check]");
                    int pos = asm.position();
                    asm.movl(new AMD64Address(rsp, -disp), AMD64.rax);
                    assert i > 0 || !isVerifiedEntryPoint || asm.position() - pos >= PATCHED_VERIFIED_ENTRY_POINT_INSTRUCTION_SIZE;
                }
            }
        }
    }
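    // A worked example of the bang loop above, under assumed (hypothetical) values: with
    // unsafe.pageSize() == 4096, pagesToBang == 3 and frameSize == 8192, lastFramePage == 2,
    // so three stores are emitted before the frame is allocated (afterFrameInit == false):
    //
    //   movl [rsp - 12288], eax    ; i == 0, disp == (0 + 3) * 4096
    //   movl [rsp - 16384], eax    ; i == 1, disp == (1 + 3) * 4096
    //   movl [rsp - 20480], eax    ; i == 2, disp == (2 + 3) * 4096
    //
    // With afterFrameInit == true each displacement shrinks by frameSize, since rsp has already
    // been moved down by that amount; the same guard pages are touched either way.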

    /**
     * The size of the instruction used to patch the verified entry point of an nmethod when the
     * nmethod is made non-entrant or a zombie (e.g., during deopt or class unloading). The first
     * instruction emitted at an nmethod's verified entry point must be at least this long to
     * ensure MT-safe patching.
     */
    public static final int PATCHED_VERIFIED_ENTRY_POINT_INSTRUCTION_SIZE = 5;
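    // 5 bytes corresponds to an x86 near jump with a 32-bit displacement (opcode 0xE9 + rel32),
    // which is the instruction HotSpot writes over the verified entry point when patching.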

    /**
     * Emits code at the verified entry point and return point(s) of a method.
     */
    class HotSpotFrameContext implements FrameContext {

        final boolean isStub;
        final boolean omitFrame;

        HotSpotFrameContext(boolean isStub, boolean omitFrame) {
            this.isStub = isStub;
            this.omitFrame = omitFrame;
        }

        public boolean hasFrame() {
            return !omitFrame;
        }

        @Override
        public void enter(CompilationResultBuilder crb) {
            FrameMap frameMap = crb.frameMap;
            int frameSize = frameMap.frameSize();
            AMD64MacroAssembler asm = (AMD64MacroAssembler) crb.asm;
            if (omitFrame) {
                if (!isStub) {
                    asm.nop(PATCHED_VERIFIED_ENTRY_POINT_INSTRUCTION_SIZE);
                }
            } else {
                int verifiedEntryPointOffset = asm.position();
                if (!isStub && pagesToBang > 0) {
                    emitStackOverflowCheck(crb, pagesToBang, false, true);
                    assert asm.position() - verifiedEntryPointOffset >= PATCHED_VERIFIED_ENTRY_POINT_INSTRUCTION_SIZE;
                }
                if (!isStub && asm.position() == verifiedEntryPointOffset) {
                    asm.subqWide(rsp, frameSize);
                    assert asm.position() - verifiedEntryPointOffset >= PATCHED_VERIFIED_ENTRY_POINT_INSTRUCTION_SIZE;
                } else {
                    asm.decrementq(rsp, frameSize);
                }
                if (ZapStackOnMethodEntry.getValue()) {
                    final int intSize = 4;
                    for (int i = 0; i < frameSize / intSize; ++i) {
                        asm.movl(new AMD64Address(rsp, i * intSize), 0xC1C1C1C1);
                    }
                }
                CalleeSaveLayout csl = frameMap.getRegisterConfig().getCalleeSaveLayout();
                if (csl != null && csl.size != 0) {
                    int frameToCSA = frameMap.offsetToCalleeSaveArea();
                    assert frameToCSA >= 0;
                    asm.save(csl, frameToCSA);
                }
            }
        }

        @Override
        public void leave(CompilationResultBuilder crb) {
            if (!omitFrame) {
                AMD64MacroAssembler asm = (AMD64MacroAssembler) crb.asm;
                CalleeSaveLayout csl = crb.frameMap.getRegisterConfig().getCalleeSaveLayout();

                if (csl != null && csl.size != 0) {
                    // enter() saved all registers in the callee save area, so restore all of them
                    int frameToCSA = crb.frameMap.offsetToCalleeSaveArea();
                    asm.restore(csl, frameToCSA);
                }

                int frameSize = crb.frameMap.frameSize();
                asm.incrementq(rsp, frameSize);
            }
        }
    }
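    // For a typical compiled method (frame not omitted, no callee save area), enter() and
    // leave() bracket the body roughly like this (a sketch; the bang count comes from
    // emitStackOverflowCheck and the return instruction from the LIR return op):
    //
    //   verified entry:
    //     movl [rsp - disp], eax    ; stack bang(s); also >= 5 bytes, keeping the entry patchable
    //     sub  rsp, frameSize       ; enter(): allocate the frame
    //     ...                       ; method body
    //     add  rsp, frameSize       ; leave(): release the frame
    //     ret                       ; emitted by the LIR return op, not by leave()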

    @Override
    protected Assembler createAssembler(FrameMap frameMap) {
        return new AMD64MacroAssembler(getTarget(), frameMap.getRegisterConfig());
    }

    @Override
    public CompilationResultBuilder newCompilationResultBuilder(LIRGenerationResult lirGenRes, FrameMap frameMap, CompilationResult compilationResult, CompilationResultBuilderFactory factory) {
        // Omit the frame if the method:
        // - has no spill slots or other slots allocated during register allocation
        // - has no callee-saved registers
        // - has no incoming arguments passed on the stack
        // - has no deoptimization points
        // - makes no foreign calls (which require an aligned stack)
        AMD64HotSpotLIRGenerationResult gen = (AMD64HotSpotLIRGenerationResult) lirGenRes;
        LIR lir = gen.getLIR();
        assert gen.getDeoptimizationRescueSlot() == null || frameMap.frameNeedsAllocating() : "a method that can deoptimize must have a frame";
        boolean omitFrame = CanOmitFrame.getValue() && !frameMap.frameNeedsAllocating() && !lir.hasArgInCallerFrame() && !gen.hasForeignCall();

        Stub stub = gen.getStub();
        Assembler masm = createAssembler(frameMap);
        HotSpotFrameContext frameContext = new HotSpotFrameContext(stub != null, omitFrame);
        CompilationResultBuilder crb = factory.createBuilder(getCodeCache(), getForeignCalls(), frameMap, masm, frameContext, compilationResult);
        crb.setTotalFrameSize(frameMap.totalFrameSize());
        StackSlot deoptimizationRescueSlot = gen.getDeoptimizationRescueSlot();
        if (deoptimizationRescueSlot != null && stub == null) {
            crb.compilationResult.setCustomStackAreaOffset(frameMap.offsetForStackSlot(deoptimizationRescueSlot));
        }

        if (stub != null) {
            Set<Register> definedRegisters = gatherDefinedRegisters(lir);
            updateStub(stub, definedRegisters, gen.getCalleeSaveInfo(), frameMap);
        }

        return crb;
    }
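    // Illustration (hypothetical method): a register-only leaf such as
    //
    //   static int add(int x, int y) { return x + y; }
    //
    // needs no spill slots, takes its arguments in registers, has no deoptimization points and
    // makes no foreign calls, so omitFrame is true and the frame context emits only the 5-byte
    // nop pad at the verified entry point.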

    @Override
    public void emitCode(CompilationResultBuilder crb, LIR lir, ResolvedJavaMethod installedCodeOwner) {
        AMD64MacroAssembler asm = (AMD64MacroAssembler) crb.asm;
        FrameMap frameMap = crb.frameMap;
        RegisterConfig regConfig = frameMap.getRegisterConfig();
        HotSpotVMConfig config = getRuntime().getConfig();
        Label verifiedEntry = new Label();

        // Emit the prefix
        emitCodePrefix(installedCodeOwner, crb, asm, regConfig, config, verifiedEntry);

        // Emit code for the LIR
        emitCodeBody(installedCodeOwner, crb, lir);

        // Emit the suffix
        emitCodeSuffix(installedCodeOwner, crb, asm, config, frameMap);

        // Profile assembler instructions
        profileInstructions(lir, crb);
    }

    /**
     * Emits the code prior to the verified entry point.
     *
     * @param installedCodeOwner see {@link Backend#emitCode}
     */
    public void emitCodePrefix(ResolvedJavaMethod installedCodeOwner, CompilationResultBuilder crb, AMD64MacroAssembler asm, RegisterConfig regConfig, HotSpotVMConfig config, Label verifiedEntry) {
        HotSpotProviders providers = getProviders();
        if (installedCodeOwner != null && !installedCodeOwner.isStatic()) {
            crb.recordMark(config.MARKID_UNVERIFIED_ENTRY);
            CallingConvention cc = regConfig.getCallingConvention(JavaCallee, null, new JavaType[]{providers.getMetaAccess().lookupJavaType(Object.class)}, getTarget(), false);
            Register inlineCacheKlass = rax; // see definition of IC_Klass in
                                             // c1_LIRAssembler_x86.cpp
            Register receiver = asRegister(cc.getArgument(0));
            AMD64Address src = new AMD64Address(receiver, config.hubOffset);

            if (config.useCompressedClassPointers) {
                Register register = r10;
                AMD64HotSpotMove.decodeKlassPointer(asm, register, providers.getRegisters().getHeapBaseRegister(), src, config.getKlassEncoding());
                if (config.narrowKlassBase != 0) {
                    // The heap base register was destroyed above, so restore it
                    asm.movq(providers.getRegisters().getHeapBaseRegister(), config.narrowOopBase);
                }
                asm.cmpq(inlineCacheKlass, register);
            } else {
                asm.cmpq(inlineCacheKlass, src);
            }
            AMD64Call.directConditionalJmp(crb, asm, getForeignCalls().lookupForeignCall(IC_MISS_HANDLER), ConditionFlag.NotEqual);
        }

        asm.align(config.codeEntryAlignment);
        crb.recordMark(config.MARKID_OSR_ENTRY);
        asm.bind(verifiedEntry);
        crb.recordMark(config.MARKID_VERIFIED_ENTRY);
    }
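    // The unverified entry emitted above amounts to this inline cache check (a sketch, following
    // the register assignments in the code: rax carries the expected klass, the receiver arrives
    // in the first argument register):
    //
    //   unverified entry:
    //     cmpq rax, [receiver + hubOffset]   ; does the receiver's klass match the IC's klass?
    //     jne  IC_MISS_HANDLER               ; mismatch: jump to the inline cache miss handler
    //   verified entry:                      ; match: fall through to the frame setup
    //
    // With compressed class pointers the hub is first decoded into r10 and compared there.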

    /**
     * Emits the code which starts at the verified entry point.
     *
     * @param installedCodeOwner see {@link Backend#emitCode}
     */
    public void emitCodeBody(ResolvedJavaMethod installedCodeOwner, CompilationResultBuilder crb, LIR lir) {
        crb.emit(lir);
    }

    /**
     * Emits the code which follows the method body: for normal methods, the exception handler
     * and deoptimization handler entries.
     *
     * @param installedCodeOwner see {@link Backend#emitCode}
     * @param config the VM configuration used to record the handler entry marks
     */
    public void emitCodeSuffix(ResolvedJavaMethod installedCodeOwner, CompilationResultBuilder crb, AMD64MacroAssembler asm, HotSpotVMConfig config, FrameMap frameMap) {
        HotSpotProviders providers = getProviders();
        HotSpotFrameContext frameContext = (HotSpotFrameContext) crb.frameContext;
        if (!frameContext.isStub) {
            HotSpotForeignCallsProvider foreignCalls = providers.getForeignCalls();
            crb.recordMark(config.MARKID_EXCEPTION_HANDLER_ENTRY);
            AMD64Call.directCall(crb, asm, foreignCalls.lookupForeignCall(EXCEPTION_HANDLER), null, false, null);
            crb.recordMark(config.MARKID_DEOPT_HANDLER_ENTRY);
            AMD64Call.directCall(crb, asm, foreignCalls.lookupForeignCall(DEOPTIMIZATION_HANDLER), null, false, null);
        } else {
            // No need to emit the stubs for entries back into the method since
            // it has no calls that can cause such "return" entries

            if (frameContext.omitFrame) {
                // Cannot access slots in the caller's frame if this frame is omitted
                assert !frameMap.accessesCallerFrame();
            }
        }
    }

    @Override
    public RegisterAllocationConfig newRegisterAllocationConfig(RegisterConfig registerConfig) {
        RegisterConfig registerConfigNonNull = registerConfig == null ? getCodeCache().getRegisterConfig() : registerConfig;
        return new AMD64HotSpotRegisterAllocationConfig(registerConfigNonNull);
    }
}