/*
 * Copyright (c) 2013, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package com.oracle.graal.hotspot.sparc;

import static com.oracle.graal.asm.sparc.SPARCAssembler.Annul.*;
import static com.oracle.graal.asm.sparc.SPARCAssembler.BranchPredict.*;
import static com.oracle.graal.asm.sparc.SPARCAssembler.CC.*;
import static com.oracle.graal.asm.sparc.SPARCAssembler.ConditionFlag.*;
import static com.oracle.graal.compiler.common.GraalOptions.*;
import static jdk.internal.jvmci.code.CallingConvention.Type.*;
import static jdk.internal.jvmci.code.ValueUtil.*;
import static jdk.internal.jvmci.common.UnsafeAccess.*;
import static jdk.internal.jvmci.sparc.SPARC.*;

import java.util.*;
import java.util.concurrent.*;

import jdk.internal.jvmci.code.*;
import jdk.internal.jvmci.code.DataSection.Data;
import jdk.internal.jvmci.hotspot.*;
import jdk.internal.jvmci.meta.*;

import com.oracle.graal.asm.*;
import com.oracle.graal.asm.sparc.*;
import com.oracle.graal.asm.sparc.SPARCMacroAssembler.ScratchRegister;
import com.oracle.graal.asm.sparc.SPARCMacroAssembler.Setx;
import com.oracle.graal.compiler.common.alloc.*;
import com.oracle.graal.compiler.common.cfg.*;
import com.oracle.graal.debug.*;
import com.oracle.graal.hotspot.*;
import com.oracle.graal.hotspot.meta.*;
import com.oracle.graal.hotspot.stubs.*;
import com.oracle.graal.lir.*;
import com.oracle.graal.lir.StandardOp.SaveRegistersOp;
import com.oracle.graal.lir.asm.*;
import com.oracle.graal.lir.framemap.*;
import com.oracle.graal.lir.gen.*;
import com.oracle.graal.lir.sparc.*;
import com.oracle.graal.lir.sparc.SPARCLIRInstruction.SizeEstimate;
import com.oracle.graal.nodes.*;
import com.oracle.graal.nodes.spi.*;

/**
 * HotSpot SPARC specific backend.
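 * <p>
 * Besides the factory methods for frame maps and LIR generation, this backend emits the method
 * prologue and epilogue (SPARC register window save/restore), the stack overflow check, and fills
 * branch delay slots by pairing delayed control transfers with delayable LIR instructions.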
 */
public class SPARCHotSpotBackend extends HotSpotHostBackend {

    private static final SizeEstimateStatistics CONSTANT_ESTIMATED_STATS = new SizeEstimateStatistics("ESTIMATE");
    private static final SizeEstimateStatistics CONSTANT_ACTUAL_STATS = new SizeEstimateStatistics("ACTUAL");

    public SPARCHotSpotBackend(HotSpotGraalRuntimeProvider runtime, HotSpotProviders providers) {
        super(runtime, providers);
    }

    private static class SizeEstimateStatistics {
        private static final ConcurrentHashMap<String, DebugMetric> metrics = new ConcurrentHashMap<>();
        private final String suffix;

        public SizeEstimateStatistics(String suffix) {
            super();
            this.suffix = suffix;
        }

        public void add(Class<?> c, int count) {
            String name = SizeEstimateStatistics.class.getSimpleName() + "_" + c.getSimpleName() + "." + suffix;
            DebugMetric m = metrics.computeIfAbsent(name, (n) -> Debug.metric(n));
            m.add(count);
        }
    }

    @Override
    public FrameMapBuilder newFrameMapBuilder(RegisterConfig registerConfig) {
        RegisterConfig registerConfigNonNull = registerConfig == null ? getCodeCache().getRegisterConfig() : registerConfig;
        return new SPARCFrameMapBuilder(newFrameMap(registerConfigNonNull), getCodeCache(), registerConfigNonNull);
    }

    @Override
    public FrameMap newFrameMap(RegisterConfig registerConfig) {
        return new SPARCFrameMap(getCodeCache(), registerConfig, this);
    }

    @Override
    public LIRGeneratorTool newLIRGenerator(CallingConvention cc, LIRGenerationResult lirGenRes) {
        return new SPARCHotSpotLIRGenerator(getProviders(), getRuntime().getConfig(), cc, lirGenRes);
    }

    @Override
    public LIRGenerationResult newLIRGenerationResult(String compilationUnitName, LIR lir, FrameMapBuilder frameMapBuilder, ResolvedJavaMethod method, Object stub) {
        return new SPARCHotSpotLIRGenerationResult(compilationUnitName, lir, frameMapBuilder, stub);
    }

    @Override
    public NodeLIRBuilderTool newNodeLIRBuilder(StructuredGraph graph, LIRGeneratorTool lirGen) {
        return new SPARCHotSpotNodeLIRBuilder(getRuntime(), graph, lirGen);
    }

    /**
     * Emits code to do stack overflow checking.
     *
     * @param afterFrameInit specifies if the stack pointer has already been adjusted to allocate
     *            the current frame
     */
    protected static void emitStackOverflowCheck(CompilationResultBuilder crb, int pagesToBang, boolean afterFrameInit) {
        if (pagesToBang > 0) {
            SPARCMacroAssembler masm = (SPARCMacroAssembler) crb.asm;
            final int frameSize = crb.frameMap.totalFrameSize();
            if (frameSize > 0) {
                int lastFramePage = frameSize / unsafe.pageSize();
                // emit multiple stack bangs for methods with frames larger than a page
                for (int i = 0; i <= lastFramePage; i++) {
                    int disp = (i + pagesToBang) * unsafe.pageSize();
                    if (afterFrameInit) {
                        disp -= frameSize;
                    }
                    crb.blockComment("[stack overflow check]");
                    // Use SPARCAddress to get the final displacement including the stack bias.
                    SPARCAddress address = new SPARCAddress(sp, -disp);
                    if (SPARCAssembler.isSimm13(address.getDisplacement())) {
                        masm.stx(g0, address);
                    } else {
                        try (ScratchRegister sc = masm.getScratchRegister()) {
                            Register scratch = sc.getRegister();
                            assert afterFrameInit || isGlobalRegister(scratch) : "Only global (g1-g7) registers are allowed if the frame was not initialized here. Got register " + scratch;
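                            // The displacement does not fit into a 13-bit signed immediate, so
                            // materialize it in the scratch register first.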
                            new Setx(address.getDisplacement(), scratch).emit(masm);
                            masm.stx(g0, new SPARCAddress(sp, scratch));
                        }
                    }
                }
            }
        }
    }

    public class HotSpotFrameContext implements FrameContext {

        final boolean isStub;

        HotSpotFrameContext(boolean isStub) {
            this.isStub = isStub;
        }

        public boolean hasFrame() {
            return true;
        }

        @Override
        public void enter(CompilationResultBuilder crb) {
            final int frameSize = crb.frameMap.totalFrameSize();
            final int stackpointerChange = -frameSize;
            SPARCMacroAssembler masm = (SPARCMacroAssembler) crb.asm;
            if (!isStub && pagesToBang > 0) {
                emitStackOverflowCheck(crb, pagesToBang, false);
            }

            if (SPARCAssembler.isSimm13(stackpointerChange)) {
                masm.save(sp, stackpointerChange, sp);
            } else {
                try (ScratchRegister sc = masm.getScratchRegister()) {
                    Register scratch = sc.getRegister();
                    assert isGlobalRegister(scratch) : "Only global registers are allowed before save. Got register " + scratch;
                    new Setx(stackpointerChange, scratch).emit(masm);
                    masm.save(sp, scratch, sp);
                }
            }

            if (ZapStackOnMethodEntry.getValue()) {
                final int slotSize = 8;
                for (int i = 0; i < frameSize / slotSize; ++i) {
                    // Zap each stack slot of the new frame by storing g0 (zero).
                    masm.stx(g0, new SPARCAddress(sp, i * slotSize));
                }
            }
        }

        @Override
        public void leave(CompilationResultBuilder crb) {
            SPARCMacroAssembler masm = (SPARCMacroAssembler) crb.asm;
            masm.restoreWindow();
        }
    }

    @Override
    protected Assembler createAssembler(FrameMap frameMap) {
        return new SPARCMacroAssembler(getTarget(), frameMap.getRegisterConfig());
    }

    @Override
    public CompilationResultBuilder newCompilationResultBuilder(LIRGenerationResult lirGenRes, FrameMap frameMap, CompilationResult compilationResult, CompilationResultBuilderFactory factory) {
        SPARCHotSpotLIRGenerationResult gen = (SPARCHotSpotLIRGenerationResult) lirGenRes;
        LIR lir = gen.getLIR();
        assert gen.getDeoptimizationRescueSlot() == null || frameMap.frameNeedsAllocating() : "method that can deoptimize must have a frame";

        Stub stub = gen.getStub();
        Assembler masm = createAssembler(frameMap);
        // On SPARC we always use stack frames.
        HotSpotFrameContext frameContext = new HotSpotFrameContext(stub != null);
        CompilationResultBuilder crb = factory.createBuilder(getProviders().getCodeCache(), getProviders().getForeignCalls(), frameMap, masm, frameContext, compilationResult);
        crb.setTotalFrameSize(frameMap.totalFrameSize());
        StackSlot deoptimizationRescueSlot = gen.getDeoptimizationRescueSlot();
        if (deoptimizationRescueSlot != null && stub == null) {
            crb.compilationResult.setCustomStackAreaOffset(frameMap.offsetForStackSlot(deoptimizationRescueSlot));
        }

        if (stub != null) {
            // Even on SPARC we need to save floating point registers.
            Set<Register> definedRegisters = gatherDefinedRegisters(lir);
            Map<LIRFrameState, SaveRegistersOp> calleeSaveInfo = gen.getCalleeSaveInfo();
            updateStub(stub, definedRegisters, calleeSaveInfo, frameMap);
        }
        assert registerSizePredictionValidator(crb);
        return crb;
    }

    /**
     * Registers a verifier which checks that each LIRInstruction's estimated constant-section
     * size is greater than or equal to the actual size it emits.
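     * <p>
     * The verifier is installed via an {@code assert}, so it only runs when assertions are
     * enabled.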
     */
    private static boolean registerSizePredictionValidator(final CompilationResultBuilder crb) {
        /**
         * Used to hold state between beforeOp and afterOp.
         */
        class ValidationState {
            LIRInstruction op;
            int constantSizeBefore;

            public void before(LIRInstruction before) {
                assert op == null : "LIRInstruction " + op + " received no after call";
                op = before;
                constantSizeBefore = calculateDataSectionSize(crb.compilationResult.getDataSection());
            }

            public void after(LIRInstruction after) {
                assert after.equals(op) : "Instructions before/after don't match " + op + "/" + after;
                int constantSizeAfter = calculateDataSectionSize(crb.compilationResult.getDataSection());
                int actual = constantSizeAfter - constantSizeBefore;
                if (op instanceof SPARCLIRInstruction) {
                    SizeEstimate size = ((SPARCLIRInstruction) op).estimateSize();
                    assert size != null : "No size prediction available for op: " + op;
                    Class<?> c = op.getClass();
                    CONSTANT_ESTIMATED_STATS.add(c, size.constantSize);
                    CONSTANT_ACTUAL_STATS.add(c, actual);
                    assert size.constantSize >= actual : "Op " + op + " exceeded estimated constant size; predicted: " + size.constantSize + " actual: " + actual;
                } else {
                    assert actual == 0 : "Op " + op + " emitted to DataSection without any estimate.";
                }
                op = null;
                constantSizeBefore = 0;
            }
        }
        final ValidationState state = new ValidationState();
        crb.setOpCallback(op -> state.before(op), op -> state.after(op));
        return true;
    }

    private static int calculateDataSectionSize(DataSection ds) {
        int sum = 0;
        for (Data d : ds) {
            sum += d.getSize();
        }
        return sum;
    }

    @Override
    public void emitCode(CompilationResultBuilder crb, LIR lir, ResolvedJavaMethod installedCodeOwner) {
        SPARCMacroAssembler masm = (SPARCMacroAssembler) crb.asm;
        // TODO: (sa) Fold the two traversals into one
        stuffDelayedControlTransfers(lir);
        int constantSize = calculateConstantSize(lir);
        boolean canUseImmediateConstantLoad = constantSize < (1 << 13);
        masm.setImmediateConstantLoad(canUseImmediateConstantLoad);
        FrameMap frameMap = crb.frameMap;
        RegisterConfig regConfig = frameMap.getRegisterConfig();
        HotSpotVMConfig config = getRuntime().getConfig();
        Label unverifiedStub = installedCodeOwner == null || installedCodeOwner.isStatic() ? null : new Label();
        boolean hasUnsafeAccess = crb.compilationResult.hasUnsafeAccess();
        int i = 0;
        do {
            if (i > 0) {
                crb.reset();
                lir.resetLabels();
                resetDelayedControlTransfers(lir);
            }

            // Emit the prefix
            if (unverifiedStub != null) {
                crb.recordMark(config.MARKID_UNVERIFIED_ENTRY);
                // We need to use JavaCall here because we haven't entered the frame yet.
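                // Check the inline cache: load the receiver's hub and compare it against the
                // expected klass in g5; on a mismatch, branch to the unverified entry stub.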
                CallingConvention cc = regConfig.getCallingConvention(JavaCall, null, new JavaType[]{getProviders().getMetaAccess().lookupJavaType(Object.class)}, getTarget(), false);
                Register inlineCacheKlass = g5; // see MacroAssembler::ic_call

                try (ScratchRegister sc = masm.getScratchRegister()) {
                    Register scratch = sc.getRegister();
                    Register receiver = asRegister(cc.getArgument(0));
                    SPARCAddress src = new SPARCAddress(receiver, config.hubOffset);

                    masm.ldx(src, scratch);
                    masm.cmp(scratch, inlineCacheKlass);
                }
                masm.bpcc(NotEqual, NOT_ANNUL, unverifiedStub, Xcc, PREDICT_NOT_TAKEN);
                masm.nop(); // delay slot
            }

            masm.align(config.codeEntryAlignment);
            crb.recordMark(config.MARKID_OSR_ENTRY);
            crb.recordMark(config.MARKID_VERIFIED_ENTRY);

            // Emit code for the LIR
            crb.emit(lir);
        } while (i++ < 1);
        // Restore the unsafeAccess flag
        crb.compilationResult.setHasUnsafeAccess(hasUnsafeAccess);
        profileInstructions(lir, crb);

        HotSpotFrameContext frameContext = (HotSpotFrameContext) crb.frameContext;
        HotSpotForeignCallsProvider foreignCalls = getProviders().getForeignCalls();
        if (!frameContext.isStub) {
            crb.recordMark(config.MARKID_EXCEPTION_HANDLER_ENTRY);
            SPARCCall.directCall(crb, masm, foreignCalls.lookupForeignCall(EXCEPTION_HANDLER), null, null);
            crb.recordMark(config.MARKID_DEOPT_HANDLER_ENTRY);
            SPARCCall.directCall(crb, masm, foreignCalls.lookupForeignCall(DEOPTIMIZATION_HANDLER), null, null);
        } else {
            // No need to emit the stubs for entries back into the method since
            // it has no calls that can cause such "return" entries
        }

        if (unverifiedStub != null) {
            masm.bind(unverifiedStub);
            try (ScratchRegister sc = masm.getScratchRegister()) {
                Register scratch = sc.getRegister();
                SPARCCall.indirectJmp(crb, masm, scratch, foreignCalls.lookupForeignCall(IC_MISS_HANDLER));
            }
        }
        masm.peephole();
    }

    private static int calculateConstantSize(LIR lir) {
        int size = 0;
        for (AbstractBlockBase<?> block : lir.codeEmittingOrder()) {
            for (LIRInstruction inst : lir.getLIRforBlock(block)) {
                if (inst instanceof SPARCLIRInstruction) {
                    SizeEstimate pred = ((SPARCLIRInstruction) inst).estimateSize();
                    if (pred != null) {
                        size += pred.constantSize;
                    }
                }
            }
        }
        return size;
    }

    private static void resetDelayedControlTransfers(LIR lir) {
        for (AbstractBlockBase<?> block : lir.codeEmittingOrder()) {
            for (LIRInstruction inst : lir.getLIRforBlock(block)) {
                if (inst instanceof SPARCDelayedControlTransfer) {
                    ((SPARCDelayedControlTransfer) inst).resetState();
                }
            }
        }
    }

    /**
     * Fix-up over the whole LIR.
     *
     * @param l the LIR to process
     * @see #stuffDelayedControlTransfers(LIR, AbstractBlockBase)
     */
    private static void stuffDelayedControlTransfers(LIR l) {
        for (AbstractBlockBase<?> b : l.codeEmittingOrder()) {
            stuffDelayedControlTransfers(l, b);
        }
    }

    /**
     * Tries to pair up DelayedControlTransfer instructions with delayable LIR instructions. Also
     * tries to move the delayable instruction next to the DelayedControlTransfer instruction, if
     * possible.
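     * <p>
     * The block's instructions are walked backwards; a {@link LIRDependencyAccumulator} tracks
     * operand overlaps to decide whether a candidate instruction may be placed into the delay
     * slot.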
     */
    private static void stuffDelayedControlTransfers(LIR l, AbstractBlockBase<?> block) {
        List<LIRInstruction> instructions = l.getLIRforBlock(block);
        if (instructions.size() >= 2) {
            LIRDependencyAccumulator acc = new LIRDependencyAccumulator();
            SPARCDelayedControlTransfer delayedTransfer = null;
            int delayTransferPosition = -1;
            for (int i = instructions.size() - 1; i >= 0; i--) {
                LIRInstruction inst = instructions.get(i);
                boolean adjacent = delayTransferPosition - i == 1;
                if (!adjacent || inst.destroysCallerSavedRegisters() || leavesRegisterWindow(inst)) {
                    delayedTransfer = null;
                }
                if (inst instanceof SPARCDelayedControlTransfer) {
                    delayedTransfer = (SPARCDelayedControlTransfer) inst;
                    acc.start(inst);
                    delayTransferPosition = i;
                } else if (delayedTransfer != null) {
                    boolean overlap = acc.add(inst);
                    if (!overlap && inst instanceof SPARCTailDelayedLIRInstruction) {
                        // We have found a non-overlapping LIR instruction which can be delayed.
                        ((SPARCTailDelayedLIRInstruction) inst).setDelayedControlTransfer(delayedTransfer);
                        delayedTransfer = null;
                    }
                }
            }
        }
    }

    private static boolean leavesRegisterWindow(LIRInstruction inst) {
        return inst instanceof SPARCLIRInstruction && ((SPARCLIRInstruction) inst).leavesRegisterWindow();
    }

    /**
     * Accumulates inputs/outputs/temps/alives in a set while walking backwards over the
     * LIRInstructions and detects whether there is any overlap. This way, LIRInstructions which
     * can be moved nearer to the DelayedControlTransfer instruction can be identified.
     */
    private static class LIRDependencyAccumulator {
        private final Set<Object> inputs = new HashSet<>(10);
        private boolean overlap = false;

        private final InstructionValueConsumer valueConsumer = (instruction, value, mode, flags) -> {
            Object valueObject = value;
            if (isRegister(value)) { // Canonicalize registers
                valueObject = asRegister(value);
            }
            if (!inputs.add(valueObject)) {
                overlap = true;
            }
        };

        public void start(LIRInstruction initial) {
            inputs.clear();
            overlap = false;
            initial.visitEachInput(valueConsumer);
            initial.visitEachTemp(valueConsumer);
            initial.visitEachAlive(valueConsumer);
        }

        /**
         * Adds the operands of the LIR instruction to the accumulator and returns true if there
         * was any overlap of operands.
         *
         * @param inst the instruction to add
         * @return true if an overlap was found
         */
        public boolean add(LIRInstruction inst) {
            overlap = false;
            inst.visitEachOutput(valueConsumer);
            inst.visitEachTemp(valueConsumer);
            inst.visitEachInput(valueConsumer);
            inst.visitEachAlive(valueConsumer);
            return overlap;
        }
    }

    @Override
    public RegisterAllocationConfig newRegisterAllocationConfig(RegisterConfig registerConfig) {
        RegisterConfig registerConfigNonNull = registerConfig == null ? getCodeCache().getRegisterConfig() : registerConfig;
        return new RegisterAllocationConfig(registerConfigNonNull);
    }
}