001/* 002 * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved. 003 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 004 * 005 * This code is free software; you can redistribute it and/or modify it 006 * under the terms of the GNU General Public License version 2 only, as 007 * published by the Free Software Foundation. 008 * 009 * This code is distributed in the hope that it will be useful, but WITHOUT 010 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 011 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 012 * version 2 for more details (a copy is included in the LICENSE file that 013 * accompanied this code). 014 * 015 * You should have received a copy of the GNU General Public License version 016 * 2 along with this work; if not, write to the Free Software Foundation, 017 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 018 * 019 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 020 * or visit www.oracle.com if you need additional information or have any 021 * questions. 022 */ 023 024package com.oracle.graal.compiler.amd64; 025 026import jdk.internal.jvmci.amd64.*; 027import jdk.internal.jvmci.code.*; 028import jdk.internal.jvmci.common.*; 029import com.oracle.graal.debug.*; 030import jdk.internal.jvmci.meta.*; 031import static com.oracle.graal.asm.amd64.AMD64Assembler.AMD64BinaryArithmetic.*; 032import static com.oracle.graal.asm.amd64.AMD64Assembler.AMD64RMOp.*; 033import static com.oracle.graal.asm.amd64.AMD64Assembler.OperandSize.*; 034 035import com.oracle.graal.asm.*; 036import com.oracle.graal.asm.amd64.AMD64Assembler.*; 037import com.oracle.graal.compiler.common.calc.*; 038import com.oracle.graal.compiler.gen.*; 039import com.oracle.graal.compiler.match.*; 040import com.oracle.graal.lir.*; 041import com.oracle.graal.lir.amd64.*; 042import com.oracle.graal.lir.amd64.AMD64ControlFlow.BranchOp; 043import com.oracle.graal.lir.gen.*; 044import com.oracle.graal.nodes.*; 045import com.oracle.graal.nodes.calc.*; 046import com.oracle.graal.nodes.extended.*; 047import com.oracle.graal.nodes.memory.*; 048 049public abstract class AMD64NodeLIRBuilder extends NodeLIRBuilder { 050 051 public AMD64NodeLIRBuilder(StructuredGraph graph, LIRGeneratorTool gen) { 052 super(graph, gen); 053 } 054 055 @Override 056 protected void emitIndirectCall(IndirectCallTargetNode callTarget, Value result, Value[] parameters, Value[] temps, LIRFrameState callState) { 057 Value targetAddressSrc = operand(callTarget.computedAddress()); 058 AllocatableValue targetAddress = AMD64.rax.asValue(targetAddressSrc.getLIRKind()); 059 gen.emitMove(targetAddress, targetAddressSrc); 060 append(new AMD64Call.IndirectCallOp(callTarget.targetMethod(), result, parameters, temps, targetAddress, callState)); 061 } 062 063 @Override 064 protected boolean peephole(ValueNode valueNode) { 065 if ((valueNode instanceof IntegerDivNode) || (valueNode instanceof IntegerRemNode)) { 066 FixedBinaryNode divRem = (FixedBinaryNode) valueNode; 067 FixedNode node = divRem.next(); 068 while (true) { 069 if (node instanceof IfNode) { 070 IfNode ifNode = (IfNode) node; 071 double probability = ifNode.getTrueSuccessorProbability(); 072 if (probability == 1.0) { 073 node = ifNode.trueSuccessor(); 074 } else if (probability == 0.0) { 075 node = ifNode.falseSuccessor(); 076 } else { 077 break; 078 } 079 } else if (!(node instanceof FixedWithNextNode)) { 080 break; 081 } 082 083 FixedWithNextNode fixedWithNextNode = (FixedWithNextNode) node; 084 if (((fixedWithNextNode instanceof IntegerDivNode) || (fixedWithNextNode instanceof IntegerRemNode)) && fixedWithNextNode.getClass() != divRem.getClass()) { 085 FixedBinaryNode otherDivRem = (FixedBinaryNode) fixedWithNextNode; 086 if (otherDivRem.getX() == divRem.getX() && otherDivRem.getY() == divRem.getY() && !hasOperand(otherDivRem)) { 087 Value[] results = ((AMD64LIRGenerator) gen).emitIntegerDivRem(operand(divRem.getX()), operand(divRem.getY()), state((DeoptimizingNode) valueNode)); 088 if (divRem instanceof IntegerDivNode) { 089 setResult(divRem, results[0]); 090 setResult(otherDivRem, results[1]); 091 } else { 092 setResult(divRem, results[1]); 093 setResult(otherDivRem, results[0]); 094 } 095 return true; 096 } 097 } 098 node = fixedWithNextNode.next(); 099 } 100 } 101 return false; 102 } 103 104 protected LIRFrameState getState(Access access) { 105 if (access instanceof DeoptimizingNode) { 106 return state((DeoptimizingNode) access); 107 } 108 return null; 109 } 110 111 protected Kind getMemoryKind(Access access) { 112 return (Kind) gen.getLIRKind(access.asNode().stamp()).getPlatformKind(); 113 } 114 115 protected OperandSize getMemorySize(Access access) { 116 switch (getMemoryKind(access)) { 117 case Boolean: 118 case Byte: 119 return OperandSize.BYTE; 120 case Char: 121 case Short: 122 return OperandSize.WORD; 123 case Int: 124 return OperandSize.DWORD; 125 case Long: 126 return OperandSize.QWORD; 127 case Float: 128 return OperandSize.SS; 129 case Double: 130 return OperandSize.SD; 131 default: 132 throw JVMCIError.shouldNotReachHere("unsupported memory access type " + getMemoryKind(access)); 133 } 134 } 135 136 protected ValueNode uncast(ValueNode value) { 137 if (value instanceof UnsafeCastNode) { 138 UnsafeCastNode cast = (UnsafeCastNode) value; 139 return cast.getOriginalNode(); 140 } 141 return value; 142 } 143 144 protected ComplexMatchResult emitCompareBranchMemory(IfNode ifNode, CompareNode compare, ValueNode value, Access access) { 145 Condition cond = compare.condition(); 146 Kind kind = getMemoryKind(access); 147 148 if (value.isConstant()) { 149 JavaConstant constant = value.asJavaConstant(); 150 if (kind == Kind.Long && !NumUtil.isInt(constant.asLong())) { 151 // Only imm32 as long 152 return null; 153 } 154 if (kind.isNumericFloat()) { 155 Debug.log("Skipping constant compares for float kinds"); 156 return null; 157 } 158 if (kind == Kind.Object) { 159 if (!constant.isNull()) { 160 Debug.log("Skipping constant compares for Object kinds"); 161 return null; 162 } 163 } 164 } else { 165 if (kind == Kind.Object) { 166 // Can't compare against objects since they require encode/decode 167 Debug.log("Skipping compares for Object kinds"); 168 return null; 169 } 170 } 171 172 // emitCompareBranchMemory expects the memory on the right, so mirror the condition if 173 // that's not true. It might be mirrored again the actual compare is emitted but that's 174 // ok. 175 Condition finalCondition = uncast(compare.getX()) == access ? cond.mirror() : cond; 176 return new ComplexMatchResult() { 177 public Value evaluate(NodeLIRBuilder builder) { 178 LabelRef trueLabel = getLIRBlock(ifNode.trueSuccessor()); 179 LabelRef falseLabel = getLIRBlock(ifNode.falseSuccessor()); 180 boolean unorderedIsTrue = compare.unorderedIsTrue(); 181 double trueLabelProbability = ifNode.probability(ifNode.trueSuccessor()); 182 Value other; 183 if (value.isConstant()) { 184 other = value.asJavaConstant(); 185 } else { 186 other = operand(value); 187 } 188 189 AMD64AddressValue address = (AMD64AddressValue) operand(access.getAddress()); 190 getLIRGeneratorTool().emitCompareBranchMemory(kind, other, address, getState(access), finalCondition, unorderedIsTrue, trueLabel, falseLabel, trueLabelProbability); 191 return null; 192 } 193 }; 194 } 195 196 private ComplexMatchResult emitIntegerTestBranchMemory(IfNode x, ValueNode value, Access access) { 197 LabelRef trueLabel = getLIRBlock(x.trueSuccessor()); 198 LabelRef falseLabel = getLIRBlock(x.falseSuccessor()); 199 double trueLabelProbability = x.probability(x.trueSuccessor()); 200 Kind kind = getMemoryKind(access); 201 OperandSize size = kind == Kind.Long ? QWORD : DWORD; 202 if (value.isConstant()) { 203 if (kind != kind.getStackKind()) { 204 return null; 205 } 206 JavaConstant constant = value.asJavaConstant(); 207 if (kind == Kind.Long && !NumUtil.isInt(constant.asLong())) { 208 // Only imm32 as long 209 return null; 210 } 211 return builder -> { 212 AMD64AddressValue address = (AMD64AddressValue) operand(access.getAddress()); 213 gen.append(new AMD64BinaryConsumer.MemoryConstOp(AMD64MIOp.TEST, size, address, (int) constant.asLong(), getState(access))); 214 gen.append(new BranchOp(Condition.EQ, trueLabel, falseLabel, trueLabelProbability)); 215 return null; 216 }; 217 } else { 218 return builder -> { 219 AMD64AddressValue address = (AMD64AddressValue) operand(access.getAddress()); 220 gen.append(new AMD64BinaryConsumer.MemoryRMOp(AMD64RMOp.TEST, size, gen.asAllocatable(operand(value)), address, getState(access))); 221 gen.append(new BranchOp(Condition.EQ, trueLabel, falseLabel, trueLabelProbability)); 222 return null; 223 }; 224 } 225 } 226 227 protected ComplexMatchResult emitConvertMemoryOp(PlatformKind kind, AMD64RMOp op, OperandSize size, Access access) { 228 return builder -> { 229 AMD64AddressValue address = (AMD64AddressValue) operand(access.getAddress()); 230 LIRFrameState state = getState(access); 231 return getLIRGeneratorTool().emitConvertMemoryOp(kind, op, size, address, state); 232 }; 233 } 234 235 private ComplexMatchResult emitSignExtendMemory(Access access, int fromBits, int toBits) { 236 assert fromBits <= toBits && toBits <= 64; 237 Kind kind = null; 238 AMD64RMOp op; 239 OperandSize size; 240 if (fromBits == toBits) { 241 return null; 242 } else if (toBits > 32) { 243 kind = Kind.Long; 244 size = QWORD; 245 // sign extend to 64 bits 246 switch (fromBits) { 247 case 8: 248 op = MOVSXB; 249 break; 250 case 16: 251 op = MOVSX; 252 break; 253 case 32: 254 op = MOVSXD; 255 break; 256 default: 257 throw JVMCIError.unimplemented("unsupported sign extension (" + fromBits + " bit -> " + toBits + " bit)"); 258 } 259 } else { 260 kind = Kind.Int; 261 size = DWORD; 262 // sign extend to 32 bits (smaller values are internally represented as 32 bit values) 263 switch (fromBits) { 264 case 8: 265 op = MOVSXB; 266 break; 267 case 16: 268 op = MOVSX; 269 break; 270 case 32: 271 return null; 272 default: 273 throw JVMCIError.unimplemented("unsupported sign extension (" + fromBits + " bit -> " + toBits + " bit)"); 274 } 275 } 276 if (kind != null && op != null) { 277 return emitConvertMemoryOp(kind, op, size, access); 278 } 279 return null; 280 } 281 282 private Value emitReinterpretMemory(LIRKind to, Access access) { 283 AMD64AddressValue address = (AMD64AddressValue) operand(access.getAddress()); 284 LIRFrameState state = getState(access); 285 return getLIRGeneratorTool().emitLoad(to, address, state); 286 } 287 288 @MatchRule("(If (IntegerTest Read=access value))") 289 @MatchRule("(If (IntegerTest FloatingRead=access value))") 290 public ComplexMatchResult integerTestBranchMemory(IfNode root, Access access, ValueNode value) { 291 return emitIntegerTestBranchMemory(root, value, access); 292 } 293 294 @MatchRule("(If (IntegerEquals=compare value Read=access))") 295 @MatchRule("(If (IntegerLessThan=compare value Read=access))") 296 @MatchRule("(If (IntegerBelow=compare value Read=access))") 297 @MatchRule("(If (IntegerEquals=compare value FloatingRead=access))") 298 @MatchRule("(If (IntegerLessThan=compare value FloatingRead=access))") 299 @MatchRule("(If (IntegerBelow=compare value FloatingRead=access))") 300 @MatchRule("(If (FloatEquals=compare value Read=access))") 301 @MatchRule("(If (FloatEquals=compare value FloatingRead=access))") 302 @MatchRule("(If (FloatLessThan=compare value Read=access))") 303 @MatchRule("(If (FloatLessThan=compare value FloatingRead=access))") 304 public ComplexMatchResult ifCompareMemory(IfNode root, CompareNode compare, ValueNode value, Access access) { 305 return emitCompareBranchMemory(root, compare, value, access); 306 } 307 308 @MatchRule("(Or (LeftShift=lshift value Constant) (UnsignedRightShift=rshift value Constant))") 309 public ComplexMatchResult rotateLeftConstant(LeftShiftNode lshift, UnsignedRightShiftNode rshift) { 310 if ((lshift.getShiftAmountMask() & (lshift.getY().asJavaConstant().asInt() + rshift.getY().asJavaConstant().asInt())) == 0) { 311 return builder -> getLIRGeneratorTool().emitRol(operand(lshift.getX()), operand(lshift.getY())); 312 } 313 return null; 314 } 315 316 @MatchRule("(Or (LeftShift value (Sub Constant=delta shiftAmount)) (UnsignedRightShift value shiftAmount))") 317 public ComplexMatchResult rotateRightVariable(ValueNode value, ConstantNode delta, ValueNode shiftAmount) { 318 if (delta.asJavaConstant().asLong() == 0 || delta.asJavaConstant().asLong() == 32) { 319 return builder -> getLIRGeneratorTool().emitRor(operand(value), operand(shiftAmount)); 320 } 321 return null; 322 } 323 324 @MatchRule("(Or (LeftShift value shiftAmount) (UnsignedRightShift value (Sub Constant=delta shiftAmount)))") 325 public ComplexMatchResult rotateLeftVariable(ValueNode value, ValueNode shiftAmount, ConstantNode delta) { 326 if (delta.asJavaConstant().asLong() == 0 || delta.asJavaConstant().asLong() == 32) { 327 return builder -> getLIRGeneratorTool().emitRol(operand(value), operand(shiftAmount)); 328 } 329 return null; 330 } 331 332 private ComplexMatchResult binaryRead(AMD64RMOp op, OperandSize size, ValueNode value, Access access) { 333 return builder -> getLIRGeneratorTool().emitBinaryMemory(op, size, getLIRGeneratorTool().asAllocatable(operand(value)), (AMD64AddressValue) operand(access.getAddress()), getState(access)); 334 } 335 336 @MatchRule("(Add value Read=access)") 337 @MatchRule("(Add value FloatingRead=access)") 338 public ComplexMatchResult addMemory(ValueNode value, Access access) { 339 OperandSize size = getMemorySize(access); 340 if (size.isXmmType()) { 341 return binaryRead(SSEOp.ADD, size, value, access); 342 } else { 343 return binaryRead(ADD.getRMOpcode(size), size, value, access); 344 } 345 } 346 347 @MatchRule("(Sub value Read=access)") 348 @MatchRule("(Sub value FloatingRead=access)") 349 public ComplexMatchResult subMemory(ValueNode value, Access access) { 350 OperandSize size = getMemorySize(access); 351 if (size.isXmmType()) { 352 return binaryRead(SSEOp.SUB, size, value, access); 353 } else { 354 return binaryRead(SUB.getRMOpcode(size), size, value, access); 355 } 356 } 357 358 @MatchRule("(Mul value Read=access)") 359 @MatchRule("(Mul value FloatingRead=access)") 360 public ComplexMatchResult mulMemory(ValueNode value, Access access) { 361 OperandSize size = getMemorySize(access); 362 if (size.isXmmType()) { 363 return binaryRead(SSEOp.MUL, size, value, access); 364 } else { 365 return binaryRead(AMD64RMOp.IMUL, size, value, access); 366 } 367 } 368 369 @MatchRule("(And value Read=access)") 370 @MatchRule("(And value FloatingRead=access)") 371 public ComplexMatchResult andMemory(ValueNode value, Access access) { 372 OperandSize size = getMemorySize(access); 373 if (size.isXmmType()) { 374 return null; 375 } else { 376 return binaryRead(AND.getRMOpcode(size), size, value, access); 377 } 378 } 379 380 @MatchRule("(Or value Read=access)") 381 @MatchRule("(Or value FloatingRead=access)") 382 public ComplexMatchResult orMemory(ValueNode value, Access access) { 383 OperandSize size = getMemorySize(access); 384 if (size.isXmmType()) { 385 return null; 386 } else { 387 return binaryRead(OR.getRMOpcode(size), size, value, access); 388 } 389 } 390 391 @MatchRule("(Xor value Read=access)") 392 @MatchRule("(Xor value FloatingRead=access)") 393 public ComplexMatchResult xorMemory(ValueNode value, Access access) { 394 OperandSize size = getMemorySize(access); 395 if (size.isXmmType()) { 396 return null; 397 } else { 398 return binaryRead(XOR.getRMOpcode(size), size, value, access); 399 } 400 } 401 402 @MatchRule("(Write object Narrow=narrow)") 403 public ComplexMatchResult writeNarrow(WriteNode root, NarrowNode narrow) { 404 return builder -> { 405 LIRKind writeKind = getLIRGeneratorTool().getLIRKind(root.value().stamp()); 406 getLIRGeneratorTool().emitStore(writeKind, operand(root.getAddress()), operand(narrow.getValue()), state(root)); 407 return null; 408 }; 409 } 410 411 @MatchRule("(SignExtend Read=access)") 412 @MatchRule("(SignExtend FloatingRead=access)") 413 public ComplexMatchResult signExtend(SignExtendNode root, Access access) { 414 return emitSignExtendMemory(access, root.getInputBits(), root.getResultBits()); 415 } 416 417 @MatchRule("(ZeroExtend Read=access)") 418 @MatchRule("(ZeroExtend FloatingRead=access)") 419 public ComplexMatchResult zeroExtend(ZeroExtendNode root, Access access) { 420 Kind memoryKind = getMemoryKind(access); 421 if (memoryKind.getBitCount() != root.getInputBits() && !memoryKind.isUnsigned()) { 422 /* 423 * The memory being read from is signed and smaller than the result size so this is a 424 * sign extension to inputBits followed by a zero extension to resultBits which can't be 425 * expressed in a memory operation. 426 */ 427 return null; 428 } 429 return builder -> getLIRGeneratorTool().emitZeroExtendMemory(memoryKind == Kind.Short ? Kind.Char : memoryKind, root.getResultBits(), (AMD64AddressValue) operand(access.getAddress()), 430 getState(access)); 431 } 432 433 @MatchRule("(FloatConvert Read=access)") 434 @MatchRule("(FloatConvert FloatingRead=access)") 435 public ComplexMatchResult floatConvert(FloatConvertNode root, Access access) { 436 switch (root.getFloatConvert()) { 437 case D2F: 438 return emitConvertMemoryOp(Kind.Float, SSEOp.CVTSD2SS, SD, access); 439 case D2I: 440 return emitConvertMemoryOp(Kind.Int, SSEOp.CVTTSD2SI, DWORD, access); 441 case D2L: 442 return emitConvertMemoryOp(Kind.Long, SSEOp.CVTTSD2SI, QWORD, access); 443 case F2D: 444 return emitConvertMemoryOp(Kind.Double, SSEOp.CVTSS2SD, SS, access); 445 case F2I: 446 return emitConvertMemoryOp(Kind.Int, SSEOp.CVTTSS2SI, DWORD, access); 447 case F2L: 448 return emitConvertMemoryOp(Kind.Long, SSEOp.CVTTSS2SI, QWORD, access); 449 case I2D: 450 return emitConvertMemoryOp(Kind.Double, SSEOp.CVTSI2SD, DWORD, access); 451 case I2F: 452 return emitConvertMemoryOp(Kind.Float, SSEOp.CVTSI2SS, DWORD, access); 453 case L2D: 454 return emitConvertMemoryOp(Kind.Double, SSEOp.CVTSI2SD, QWORD, access); 455 case L2F: 456 return emitConvertMemoryOp(Kind.Float, SSEOp.CVTSI2SS, QWORD, access); 457 default: 458 throw JVMCIError.shouldNotReachHere(); 459 } 460 } 461 462 @MatchRule("(Reinterpret Read=access)") 463 @MatchRule("(Reinterpret FloatingRead=access)") 464 public ComplexMatchResult reinterpret(ReinterpretNode root, Access access) { 465 return builder -> { 466 LIRKind kind = getLIRGeneratorTool().getLIRKind(root.stamp()); 467 return emitReinterpretMemory(kind, access); 468 }; 469 470 } 471 472 @Override 473 public void visitBreakpointNode(BreakpointNode node) { 474 JavaType[] sig = new JavaType[node.arguments().size()]; 475 for (int i = 0; i < sig.length; i++) { 476 sig[i] = node.arguments().get(i).stamp().javaType(gen.getMetaAccess()); 477 } 478 479 Value[] parameters = visitInvokeArguments(gen.getResult().getFrameMapBuilder().getRegisterConfig().getCallingConvention(CallingConvention.Type.JavaCall, null, sig, gen.target(), false), 480 node.arguments()); 481 append(new AMD64BreakpointOp(parameters)); 482 } 483 484 @Override 485 public AMD64LIRGenerator getLIRGeneratorTool() { 486 return (AMD64LIRGenerator) gen; 487 } 488}