/*
 * Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package com.oracle.graal.hotspot.amd64;

import static com.oracle.graal.asm.amd64.AMD64Assembler.AMD64BinaryArithmetic.*;
import static com.oracle.graal.asm.amd64.AMD64Assembler.AMD64RMOp.*;
import static com.oracle.graal.asm.amd64.AMD64Assembler.OperandSize.*;
import static com.oracle.graal.hotspot.HotSpotBackend.*;
import static jdk.internal.jvmci.amd64.AMD64.*;

import java.util.*;

import jdk.internal.jvmci.amd64.*;
import jdk.internal.jvmci.code.*;
import jdk.internal.jvmci.common.*;
import jdk.internal.jvmci.hotspot.*;
import jdk.internal.jvmci.hotspot.HotSpotVMConfig.*;
import jdk.internal.jvmci.meta.*;

import com.oracle.graal.asm.amd64.AMD64Address.Scale;
import com.oracle.graal.asm.amd64.AMD64Assembler.AMD64MIOp;
import com.oracle.graal.asm.amd64.AMD64Assembler.OperandSize;
import com.oracle.graal.compiler.amd64.*;
import com.oracle.graal.compiler.common.*;
import com.oracle.graal.compiler.common.spi.*;
import com.oracle.graal.debug.*;
import com.oracle.graal.hotspot.*;
import com.oracle.graal.hotspot.amd64.AMD64HotSpotMove.StoreRbpOp;
import com.oracle.graal.hotspot.debug.*;
import com.oracle.graal.hotspot.meta.*;
import com.oracle.graal.hotspot.stubs.*;
import com.oracle.graal.lir.*;
import com.oracle.graal.lir.StandardOp.NoOp;
import com.oracle.graal.lir.StandardOp.SaveRegistersOp;
import com.oracle.graal.lir.amd64.*;
import com.oracle.graal.lir.amd64.AMD64Move.LeaDataOp;
import com.oracle.graal.lir.amd64.AMD64Move.MoveFromRegOp;
import com.oracle.graal.lir.asm.*;
import com.oracle.graal.lir.framemap.*;
import com.oracle.graal.lir.gen.*;

/**
 * LIR generator specialized for AMD64 HotSpot.
 */
public class AMD64HotSpotLIRGenerator extends AMD64LIRGenerator implements HotSpotLIRGenerator {

    final HotSpotVMConfig config;
    private HotSpotLockStack lockStack;

    protected AMD64HotSpotLIRGenerator(HotSpotProviders providers, HotSpotVMConfig config, CallingConvention cc, LIRGenerationResult lirGenRes) {
        this(new DefaultLIRKindTool(providers.getCodeCache().getTarget().wordKind), providers, config, cc, lirGenRes);
    }

    protected AMD64HotSpotLIRGenerator(LIRKindTool lirKindTool, HotSpotProviders providers, HotSpotVMConfig config, CallingConvention cc, LIRGenerationResult lirGenRes) {
        super(lirKindTool, providers, cc, lirGenRes);
        assert config.basicLockSize == 8;
        this.config = config;
    }

    @Override
    public HotSpotProviders getProviders() {
        return (HotSpotProviders) super.getProviders();
    }
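    // Note: compiled code may reuse RBP as an ordinary allocatable register, but the
    // HotSpot runtime still needs the caller's RBP value for stack walking and
    // deoptimization. The SaveRbp machinery below preserves it, either in another
    // register or in a dedicated frame slot, so that ops such as
    // AMD64HotSpotReturnOp, AMD64HotSpotUnwindOp and AMD64HotSpotDeoptimizeCallerOp
    // can restore it (they all take getRbpRescueSlot() as an input).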
    /**
     * Utility for emitting the instruction to save RBP.
     */
    class SaveRbp {

        private final NoOp placeholder;

        /**
         * The slot reserved for saving RBP.
         */
        private final StackSlot reservedSlot;

        /**
         * The variable reserved for saving RBP.
         *
         * This should be allocated either to RBP or to {@link #reservedSlot}.
         */
        private final AllocatableValue rescueSlot;

        public SaveRbp(NoOp placeholder0) {
            this.placeholder = placeholder0;
            AMD64FrameMapBuilder frameMapBuilder = (AMD64FrameMapBuilder) getResult().getFrameMapBuilder();
            this.reservedSlot = frameMapBuilder.allocateRBPSpillSlot();
            this.rescueSlot = newVariable(LIRKind.value(Kind.Long));
        }

        /**
         * Replaces this operation with the appropriate move for saving RBP.
         *
         * @param useStack specifies if RBP must be saved to the stack
         */
        public void finalize(boolean useStack) {
            RegisterValue rbpValue = rbp.asValue(LIRKind.value(Kind.Long));
            LIRInstruction move;
            if (useStack) {
                move = new StoreRbpOp(rescueSlot, rbpValue, reservedSlot);
            } else {
                ((AMD64FrameMapBuilder) getResult().getFrameMapBuilder()).freeRBPSpillSlot();
                move = new MoveFromRegOp(Kind.Long, rescueSlot, rbpValue);
            }
            placeholder.replace(getResult().getLIR(), move);
        }

        AllocatableValue getRbpRescueSlot() {
            return rescueSlot;
        }
    }

    private SaveRbp saveRbp;

    protected void emitSaveRbp() {
        NoOp placeholder = new NoOp(getCurrentBlock(), getResult().getLIR().getLIRforBlock(getCurrentBlock()).size());
        append(placeholder);
        saveRbp = new SaveRbp(placeholder);
    }

    protected SaveRbp getSaveRbp() {
        return saveRbp;
    }
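    // The save happens in two phases: emitSaveRbp() appends a NoOp placeholder at
    // the current position, and beforeRegisterAllocation() later replaces it via
    // SaveRbp.finalize() with either a register-to-register move or a store to the
    // reserved frame slot, once it is known whether debug info forces RBP onto the
    // stack.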
    /**
     * Helper instruction to reserve a stack slot for the whole method. Note that the actual users
     * of the stack slot might be inserted after stack slot allocation. This dummy instruction
     * ensures that the stack slot is alive and gets a real stack slot assigned.
     */
    private static final class RescueSlotDummyOp extends LIRInstruction {
        public static final LIRInstructionClass<RescueSlotDummyOp> TYPE = LIRInstructionClass.create(RescueSlotDummyOp.class);

        @Alive({OperandFlag.STACK, OperandFlag.UNINITIALIZED}) private StackSlotValue slot;

        public RescueSlotDummyOp(FrameMapBuilder frameMapBuilder, LIRKind kind) {
            super(TYPE);
            slot = frameMapBuilder.allocateSpillSlot(kind);
        }

        public StackSlotValue getSlot() {
            return slot;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb) {
        }
    }

    private RescueSlotDummyOp rescueSlotOp;

    private StackSlotValue getOrInitRescueSlot() {
        RescueSlotDummyOp op = getOrInitRescueSlotOp();
        return op.getSlot();
    }

    private RescueSlotDummyOp getOrInitRescueSlotOp() {
        if (rescueSlotOp == null) {
            // create dummy instruction to keep the rescue slot alive
            rescueSlotOp = new RescueSlotDummyOp(getResult().getFrameMapBuilder(), getLIRKindTool().getWordKind());
        }
        return rescueSlotOp;
    }

    @Override
    public StackSlotValue getLockSlot(int lockDepth) {
        return getLockStack().makeLockSlot(lockDepth);
    }

    private HotSpotLockStack getLockStack() {
        assert lockStack != null;
        return lockStack;
    }

    protected void setLockStack(HotSpotLockStack lockStack) {
        assert this.lockStack == null;
        this.lockStack = lockStack;
    }

    private Register findPollOnReturnScratchRegister() {
        RegisterConfig regConfig = getProviders().getCodeCache().getRegisterConfig();
        for (Register r : regConfig.getAllocatableRegisters()) {
            if (!r.equals(regConfig.getReturnRegister(Kind.Long)) && !r.equals(AMD64.rbp)) {
                return r;
            }
        }
        throw JVMCIError.shouldNotReachHere();
    }

    private Register pollOnReturnScratchRegister;
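    // The return sequence includes a safepoint poll, which needs a scratch register
    // to address the polling page. Any allocatable register will do, as long as it
    // is neither the (long) return register nor RBP, both of which are still live
    // at that point; findPollOnReturnScratchRegister() picks the first such
    // register and the choice is cached below.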
    @Override
    public void emitReturn(Value input) {
        AllocatableValue operand = Value.ILLEGAL;
        if (input != null) {
            operand = resultOperandFor(input.getLIRKind());
            emitMove(operand, input);
        }
        if (pollOnReturnScratchRegister == null) {
            pollOnReturnScratchRegister = findPollOnReturnScratchRegister();
        }
        append(new AMD64HotSpotReturnOp(operand, getStub() != null, pollOnReturnScratchRegister, config, saveRbp.getRbpRescueSlot()));
    }

    @Override
    public boolean needOnlyOopMaps() {
        // Stubs only need oop maps
        return ((AMD64HotSpotLIRGenerationResult) getResult()).getStub() != null;
    }

    @Override
    public void emitData(AllocatableValue dst, byte[] data) {
        append(new LeaDataOp(dst, data));
    }

    private LIRFrameState currentRuntimeCallInfo;

    @Override
    protected void emitForeignCallOp(ForeignCallLinkage linkage, Value result, Value[] arguments, Value[] temps, LIRFrameState info) {
        currentRuntimeCallInfo = info;
        super.emitForeignCallOp(linkage, result, arguments, temps, info);
    }

    public void emitLeaveCurrentStackFrame(SaveRegistersOp saveRegisterOp) {
        append(new AMD64HotSpotLeaveCurrentStackFrameOp(saveRegisterOp, saveRbp.getRbpRescueSlot()));
    }

    public void emitLeaveDeoptimizedStackFrame(Value frameSize, Value initialInfo) {
        Variable frameSizeVariable = load(frameSize);
        Variable initialInfoVariable = load(initialInfo);
        append(new AMD64HotSpotLeaveDeoptimizedStackFrameOp(frameSizeVariable, initialInfoVariable, saveRbp.getRbpRescueSlot()));
    }

    public void emitEnterUnpackFramesStackFrame(Value framePc, Value senderSp, Value senderFp, SaveRegistersOp saveRegisterOp) {
        Register threadRegister = getProviders().getRegisters().getThreadRegister();
        Variable framePcVariable = load(framePc);
        Variable senderSpVariable = load(senderSp);
        Variable senderFpVariable = load(senderFp);
        append(new AMD64HotSpotEnterUnpackFramesStackFrameOp(threadRegister, config.threadLastJavaSpOffset(), config.threadLastJavaPcOffset(), config.threadLastJavaFpOffset(), framePcVariable,
                        senderSpVariable, senderFpVariable, saveRegisterOp));
    }

    public void emitLeaveUnpackFramesStackFrame(SaveRegistersOp saveRegisterOp) {
        Register threadRegister = getProviders().getRegisters().getThreadRegister();
        append(new AMD64HotSpotLeaveUnpackFramesStackFrameOp(threadRegister, config.threadLastJavaSpOffset(), config.threadLastJavaPcOffset(), config.threadLastJavaFpOffset(), saveRegisterOp));
    }

    @Override
    public Value emitCardTableShift() {
        Variable result = newVariable(LIRKind.value(Kind.Long));
        append(new AMD64HotSpotCardTableShiftOp(result, config));
        return result;
    }

    @Override
    public Value emitCardTableAddress() {
        Variable result = newVariable(LIRKind.value(Kind.Long));
        append(new AMD64HotSpotCardTableAddressOp(result, config));
        return result;
    }

    /**
     * @param savedRegisters the registers saved by this operation which may be subject to pruning
     * @param savedRegisterLocations the slots to which the registers are saved
     * @param supportsRemove determines if registers can be pruned
     */
    protected AMD64SaveRegistersOp emitSaveRegisters(Register[] savedRegisters, StackSlotValue[] savedRegisterLocations, boolean supportsRemove) {
        AMD64SaveRegistersOp save = new AMD64SaveRegistersOp(savedRegisters, savedRegisterLocations, supportsRemove);
        append(save);
        return save;
    }

    /**
     * Allocates a spill slot for each of the given registers and emits the operation that saves
     * them all to the stack.
     *
     * @param supportsRemove determines if registers can be pruned
     * @return the register save operation
     */
    private AMD64SaveRegistersOp emitSaveAllRegisters(Register[] savedRegisters, boolean supportsRemove) {
        StackSlotValue[] savedRegisterLocations = new StackSlotValue[savedRegisters.length];
        for (int i = 0; i < savedRegisters.length; i++) {
            PlatformKind kind = target().arch.getLargestStorableKind(savedRegisters[i].getRegisterCategory());
            assert kind != Kind.Illegal;
            VirtualStackSlot spillSlot = getResult().getFrameMapBuilder().allocateSpillSlot(LIRKind.value(kind));
            savedRegisterLocations[i] = spillSlot;
        }
        return emitSaveRegisters(savedRegisters, savedRegisterLocations, supportsRemove);
    }

    @Override
    public SaveRegistersOp emitSaveAllRegisters() {
        // We are saving all registers.
        // TODO Save upper half of YMM registers.
        return emitSaveAllRegisters(cpuxmmRegisters, false);
    }

    protected void emitRestoreRegisters(AMD64SaveRegistersOp save) {
        append(new AMD64RestoreRegistersOp(save.getSlots().clone(), save));
    }

    /**
     * Gets the {@link Stub} this generator is generating code for or {@code null} if a stub is not
     * being generated.
     */
    public Stub getStub() {
        return ((AMD64HotSpotLIRGenerationResult) getResult()).getStub();
    }
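    // For a foreign call that destroys registers, a stub that promises to preserve
    // them gets an explicit save/restore pair around the call, and the save op is
    // recorded in the callee-save info keyed by the call's frame state. Otherwise
    // (with assertions enabled) the allocatable registers are zapped after the call
    // to make any stale use fail fast.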
    @Override
    public Variable emitForeignCall(ForeignCallLinkage linkage, LIRFrameState state, Value... args) {
        HotSpotForeignCallLinkage hotspotLinkage = (HotSpotForeignCallLinkage) linkage;
        boolean destroysRegisters = hotspotLinkage.destroysRegisters();

        AMD64SaveRegistersOp save = null;
        Stub stub = getStub();
        if (destroysRegisters) {
            if (stub != null && stub.preservesRegisters()) {
                Register[] savedRegisters = getResult().getFrameMapBuilder().getRegisterConfig().getAllocatableRegisters();
                save = emitSaveAllRegisters(savedRegisters, true);
            }
        }

        Variable result;
        LIRFrameState debugInfo = null;
        if (hotspotLinkage.needsDebugInfo()) {
            debugInfo = state;
            assert debugInfo != null || stub != null;
        }

        if (hotspotLinkage.needsJavaFrameAnchor()) {
            Register thread = getProviders().getRegisters().getThreadRegister();
            append(new AMD64HotSpotCRuntimeCallPrologueOp(config.threadLastJavaSpOffset(), thread));
            result = super.emitForeignCall(hotspotLinkage, debugInfo, args);
            append(new AMD64HotSpotCRuntimeCallEpilogueOp(config.threadLastJavaSpOffset(), config.threadLastJavaFpOffset(), thread));
        } else {
            result = super.emitForeignCall(hotspotLinkage, debugInfo, args);
        }

        if (destroysRegisters) {
            if (stub != null) {
                if (stub.preservesRegisters()) {
                    AMD64HotSpotLIRGenerationResult generationResult = (AMD64HotSpotLIRGenerationResult) getResult();
                    assert !generationResult.getCalleeSaveInfo().containsKey(currentRuntimeCallInfo);
                    generationResult.getCalleeSaveInfo().put(currentRuntimeCallInfo, save);
                    emitRestoreRegisters(save);
                } else {
                    assert zapRegisters();
                }
            }
        }

        return result;
    }

    public Value emitUncommonTrapCall(Value trapRequest, SaveRegistersOp saveRegisterOp) {
        ForeignCallLinkage linkage = getForeignCalls().lookupForeignCall(UNCOMMON_TRAP);

        Register thread = getProviders().getRegisters().getThreadRegister();
        append(new AMD64HotSpotCRuntimeCallPrologueOp(config.threadLastJavaSpOffset(), thread));
        Variable result = super.emitForeignCall(linkage, null, thread.asValue(LIRKind.value(Kind.Long)), trapRequest);
        append(new AMD64HotSpotCRuntimeCallEpilogueOp(config.threadLastJavaSpOffset(), config.threadLastJavaFpOffset(), thread));

        Map<LIRFrameState, SaveRegistersOp> calleeSaveInfo = ((AMD64HotSpotLIRGenerationResult) getResult()).getCalleeSaveInfo();
        assert !calleeSaveInfo.containsKey(currentRuntimeCallInfo);
        calleeSaveInfo.put(currentRuntimeCallInfo, saveRegisterOp);

        return result;
    }

    public Value emitDeoptimizationFetchUnrollInfoCall(SaveRegistersOp saveRegisterOp) {
        ForeignCallLinkage linkage = getForeignCalls().lookupForeignCall(FETCH_UNROLL_INFO);

        Register thread = getProviders().getRegisters().getThreadRegister();
        append(new AMD64HotSpotCRuntimeCallPrologueOp(config.threadLastJavaSpOffset(), thread));
        Variable result = super.emitForeignCall(linkage, null, thread.asValue(LIRKind.value(Kind.Long)));
        append(new AMD64HotSpotCRuntimeCallEpilogueOp(config.threadLastJavaSpOffset(), config.threadLastJavaFpOffset(), thread));

        Map<LIRFrameState, SaveRegistersOp> calleeSaveInfo = ((AMD64HotSpotLIRGenerationResult) getResult()).getCalleeSaveInfo();
        assert !calleeSaveInfo.containsKey(currentRuntimeCallInfo);
        calleeSaveInfo.put(currentRuntimeCallInfo, saveRegisterOp);

        return result;
    }
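    // Register zapping is a debugging aid: after a call that destroys registers
    // without preserving them, every allocatable register is overwritten with a
    // recognizable sentinel value (see zapValueForKind) so stale uses are caught
    // early. zapRegisters() returns true so it can be invoked from inside an
    // assert and thus costs nothing when assertions are disabled.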
    protected AMD64ZapRegistersOp emitZapRegisters(Register[] zappedRegisters, JavaConstant[] zapValues) {
        AMD64ZapRegistersOp zap = new AMD64ZapRegistersOp(zappedRegisters, zapValues);
        append(zap);
        return zap;
    }

    protected boolean zapRegisters() {
        Register[] zappedRegisters = getResult().getFrameMapBuilder().getRegisterConfig().getAllocatableRegisters();
        JavaConstant[] zapValues = new JavaConstant[zappedRegisters.length];
        for (int i = 0; i < zappedRegisters.length; i++) {
            PlatformKind kind = target().arch.getLargestStorableKind(zappedRegisters[i].getRegisterCategory());
            assert kind != Kind.Illegal;
            zapValues[i] = zapValueForKind(kind);
        }
        ((AMD64HotSpotLIRGenerationResult) getResult()).getCalleeSaveInfo().put(currentRuntimeCallInfo, emitZapRegisters(zappedRegisters, zapValues));
        return true;
    }

    @Override
    public void emitTailcall(Value[] args, Value address) {
        append(new AMD64TailcallOp(args, address));
    }

    @Override
    public void emitCCall(long address, CallingConvention nativeCallingConvention, Value[] args, int numberOfFloatingPointArguments) {
        Value[] argLocations = new Value[args.length];
        getResult().getFrameMapBuilder().callsMethod(nativeCallingConvention);
        // TODO(mg): for a varargs native function, the System V AMD64 ABI requires AL to
        // contain the number of vector registers used for the variable arguments
        PrimitiveConstant intConst = JavaConstant.forInt(numberOfFloatingPointArguments);
        AllocatableValue numberOfFloatingPointArgumentsRegister = AMD64.rax.asValue(intConst.getLIRKind());
        emitMove(numberOfFloatingPointArgumentsRegister, intConst);
        for (int i = 0; i < args.length; i++) {
            Value arg = args[i];
            AllocatableValue loc = nativeCallingConvention.getArgument(i);
            emitMove(loc, arg);
            argLocations[i] = loc;
        }
        Value ptr = emitMove(JavaConstant.forLong(address));
        append(new AMD64CCall(nativeCallingConvention.getReturn(), ptr, numberOfFloatingPointArgumentsRegister, argLocations));
    }

    @Override
    public void emitUnwind(Value exception) {
        ForeignCallLinkage linkage = getForeignCalls().lookupForeignCall(HotSpotBackend.UNWIND_EXCEPTION_TO_CALLER);
        CallingConvention outgoingCc = linkage.getOutgoingCallingConvention();
        assert outgoingCc.getArgumentCount() == 2;
        RegisterValue exceptionParameter = (RegisterValue) outgoingCc.getArgument(0);
        emitMove(exceptionParameter, exception);
        append(new AMD64HotSpotUnwindOp(exceptionParameter, saveRbp.getRbpRescueSlot()));
    }

    private void moveDeoptValuesToThread(Value actionAndReason, Value speculation) {
        moveValueToThread(actionAndReason, config.pendingDeoptimizationOffset);
        moveValueToThread(speculation, config.pendingFailedSpeculationOffset);
    }

    private void moveValueToThread(Value v, int offset) {
        LIRKind wordKind = LIRKind.value(getProviders().getCodeCache().getTarget().wordKind);
        RegisterValue thread = getProviders().getRegisters().getThreadRegister().asValue(wordKind);
        AMD64AddressValue address = new AMD64AddressValue(wordKind, thread, offset);
        emitStore(v.getLIRKind(), address, v, null);
    }

    @Override
    public void emitDeoptimize(Value actionAndReason, Value speculation, LIRFrameState state) {
        moveDeoptValuesToThread(actionAndReason, speculation);
        append(new AMD64DeoptimizeOp(state));
    }

    @Override
    public void emitDeoptimizeCaller(DeoptimizationAction action, DeoptimizationReason reason) {
        moveDeoptValuesToThread(getMetaAccess().encodeDeoptActionAndReason(action, reason, 0), JavaConstant.NULL_POINTER);
        append(new AMD64HotSpotDeoptimizeCallerOp(saveRbp.getRbpRescueSlot()));
    }

    @Override
    public void beforeRegisterAllocation() {
        super.beforeRegisterAllocation();
        boolean hasDebugInfo = getResult().getLIR().hasDebugInfo();
        saveRbp.finalize(hasDebugInfo);
        if (hasDebugInfo) {
            ((AMD64HotSpotLIRGenerationResult) getResult()).setDeoptimizationRescueSlot(((AMD64FrameMapBuilder) getResult().getFrameMapBuilder()).allocateDeoptimizationRescueSlot());
        }

        if (BenchmarkCounters.enabled) {
            // ensure that the rescue slot is available
            LIRInstruction op = getOrInitRescueSlotOp();
            // insert dummy instruction into the start block
            LIR lir = getResult().getLIR();
            List<LIRInstruction> instructions = lir.getLIRforBlock(lir.getControlFlowGraph().getStartBlock());
            instructions.add(1, op);
            Debug.dump(lir, "created rescue dummy op");
        }
    }

    public void emitPushInterpreterFrame(Value frameSize, Value framePc, Value senderSp, Value initialInfo) {
        Variable frameSizeVariable = load(frameSize);
        Variable framePcVariable = load(framePc);
        Variable senderSpVariable = load(senderSp);
        Variable initialInfoVariable = load(initialInfo);
        append(new AMD64HotSpotPushInterpreterFrameOp(frameSizeVariable, framePcVariable, senderSpVariable, initialInfoVariable, config));
    }
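    // The methods below deal with compressed pointers. HotSpot compresses a 64-bit
    // oop or metaspace pointer to 32 bits roughly as
    //     compressed = (pointer - base) >>> shift
    // and decompresses with the inverse. The base is the heap base register for
    // oops and encoding.base for metaspace pointers; when base and shift are both
    // zero, the compressed value is the pointer itself.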
    @Override
    protected void emitStoreConst(Kind kind, AMD64AddressValue address, JavaConstant value, LIRFrameState state) {
        if (value instanceof HotSpotConstant && value.isNonNull()) {
            HotSpotConstant c = (HotSpotConstant) value;
            if (c.isCompressed()) {
                assert kind == Kind.Int;
                if (!target().inlineObjects && c instanceof HotSpotObjectConstant) {
                    emitStore(kind, address, asAllocatable(value), state);
                } else {
                    append(new AMD64HotSpotBinaryConsumer.MemoryConstOp(AMD64MIOp.MOV, address, c, state));
                }
            } else {
                emitStore(kind, address, asAllocatable(value), state);
            }
        } else {
            super.emitStoreConst(kind, address, value, state);
        }
    }

    @Override
    public Value emitCompress(Value pointer, CompressEncoding encoding, boolean nonNull) {
        LIRKind inputKind = pointer.getLIRKind();
        assert inputKind.getPlatformKind() == Kind.Long || inputKind.getPlatformKind() == Kind.Object;
        if (inputKind.isReference(0)) {
            // oop
            Variable result = newVariable(LIRKind.reference(Kind.Int));
            append(new AMD64HotSpotMove.CompressPointer(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull));
            return result;
        } else {
            // metaspace pointer
            Variable result = newVariable(LIRKind.value(Kind.Int));
            AllocatableValue base = Value.ILLEGAL;
            if (encoding.base != 0) {
                base = emitMove(JavaConstant.forLong(encoding.base));
            }
            append(new AMD64HotSpotMove.CompressPointer(result, asAllocatable(pointer), base, encoding, nonNull));
            return result;
        }
    }

    @Override
    public Value emitUncompress(Value pointer, CompressEncoding encoding, boolean nonNull) {
        LIRKind inputKind = pointer.getLIRKind();
        assert inputKind.getPlatformKind() == Kind.Int;
        if (inputKind.isReference(0)) {
            // oop
            Variable result = newVariable(LIRKind.reference(Kind.Object));
            append(new AMD64HotSpotMove.UncompressPointer(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull));
            return result;
        } else {
            // metaspace pointer
            Variable result = newVariable(LIRKind.value(Kind.Long));
            AllocatableValue base = Value.ILLEGAL;
            if (encoding.base != 0) {
                base = emitMove(JavaConstant.forLong(encoding.base));
            }
            append(new AMD64HotSpotMove.UncompressPointer(result, asAllocatable(pointer), base, encoding, nonNull));
            return result;
        }
    }
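    // Object and metaspace constants cannot always be encoded as plain immediates:
    // they may need to be recorded so the runtime can patch them, which is why
    // moves of such constants use the dedicated HotSpot load ops below. A
    // compressed null, by contrast, is simply the 32-bit integer 0.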
    @Override
    protected AMD64LIRInstruction createMove(AllocatableValue dst, Value src) {
        if (src instanceof JavaConstant) {
            if (HotSpotCompressedNullConstant.COMPRESSED_NULL.equals(src)) {
                return super.createMove(dst, JavaConstant.INT_0);
            }
            if (src instanceof HotSpotObjectConstant) {
                return new AMD64HotSpotMove.HotSpotLoadObjectConstantOp(dst, (HotSpotObjectConstant) src);
            }
            if (src instanceof HotSpotMetaspaceConstant) {
                return new AMD64HotSpotMove.HotSpotLoadMetaspaceConstantOp(dst, (HotSpotMetaspaceConstant) src);
            }
        }
        return super.createMove(dst, src);
    }

    @Override
    public void emitNullCheck(Value address, LIRFrameState state) {
        if (address.getLIRKind().getPlatformKind() == Kind.Int) {
            CompressEncoding encoding = config.getOopEncoding();
            Value uncompressed;
            if (encoding.shift <= 3) {
                LIRKind wordKind = LIRKind.unknownReference(target().wordKind);
                uncompressed = new AMD64AddressValue(wordKind, getProviders().getRegisters().getHeapBaseRegister().asValue(wordKind), asAllocatable(address), Scale.fromInt(1 << encoding.shift), 0);
            } else {
                uncompressed = emitUncompress(address, encoding, false);
            }
            append(new AMD64Move.NullCheckOp(asAddressValue(uncompressed), state));
        } else {
            super.emitNullCheck(address, state);
        }
    }

    @Override
    protected void emitCompareOp(PlatformKind cmpKind, Variable left, Value right) {
        if (HotSpotCompressedNullConstant.COMPRESSED_NULL.equals(right)) {
            append(new AMD64BinaryConsumer.Op(TEST, DWORD, left, left));
        } else if (right instanceof HotSpotConstant) {
            HotSpotConstant c = (HotSpotConstant) right;

            boolean isImmutable = GraalOptions.ImmutableCode.getValue();
            boolean generatePIC = GraalOptions.GeneratePIC.getValue();
            if (c.isCompressed() && !(isImmutable && generatePIC)) {
                append(new AMD64HotSpotBinaryConsumer.ConstOp(CMP.getMIOpcode(DWORD, false), left, c));
            } else {
                OperandSize size = c.isCompressed() ? DWORD : QWORD;
                append(new AMD64BinaryConsumer.DataOp(CMP.getRMOpcode(size), size, left, c));
            }
        } else {
            super.emitCompareOp(cmpKind, left, right);
        }
    }

    @Override
    protected boolean emitCompareMemoryConOp(OperandSize size, JavaConstant a, AMD64AddressValue b, LIRFrameState state) {
        if (a.isNull()) {
            append(new AMD64BinaryConsumer.MemoryConstOp(CMP, size, b, 0, state));
            return true;
        } else if (a instanceof HotSpotConstant && size == DWORD) {
            assert ((HotSpotConstant) a).isCompressed();
            append(new AMD64HotSpotBinaryConsumer.MemoryConstOp(CMP.getMIOpcode(size, false), b, (HotSpotConstant) a, state));
            return true;
        } else {
            return super.emitCompareMemoryConOp(size, a, b, state);
        }
    }

    @Override
    public boolean canInlineConstant(JavaConstant c) {
        if (HotSpotCompressedNullConstant.COMPRESSED_NULL.equals(c)) {
            return true;
        } else if (c instanceof HotSpotObjectConstant) {
            return ((HotSpotObjectConstant) c).isCompressed();
        } else {
            return super.canInlineConstant(c);
        }
    }
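    // Benchmark counter ops may need a temporary stack slot, so every counter op is
    // handed the shared rescue slot reserved by RescueSlotDummyOp;
    // beforeRegisterAllocation() ensures the dummy op keeping that slot alive is
    // inserted into the start block.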
    @Override
    public LIRInstruction createBenchmarkCounter(String name, String group, Value increment) {
        if (BenchmarkCounters.enabled) {
            return new AMD64HotSpotCounterOp(name, group, increment, getProviders().getRegisters(), config, getOrInitRescueSlot());
        }
        return null;
    }

    @Override
    public LIRInstruction createMultiBenchmarkCounter(String[] names, String[] groups, Value[] increments) {
        if (BenchmarkCounters.enabled) {
            return new AMD64HotSpotCounterOp(names, groups, increments, getProviders().getRegisters(), config, getOrInitRescueSlot());
        }
        return null;
    }

    @Override
    public void emitPrefetchAllocate(Value address) {
        append(new AMD64PrefetchOp(asAddressValue(address), config.allocatePrefetchInstr));
    }
}