/*
 * Copyright (c) 2013, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package com.oracle.graal.lir.sparc;

import static com.oracle.graal.asm.sparc.SPARCAssembler.*;
import static com.oracle.graal.asm.sparc.SPARCAssembler.Annul.*;
import static com.oracle.graal.asm.sparc.SPARCAssembler.BranchPredict.*;
import static com.oracle.graal.asm.sparc.SPARCAssembler.CC.*;
import static com.oracle.graal.asm.sparc.SPARCAssembler.ConditionFlag.*;
import static com.oracle.graal.lir.LIRInstruction.OperandFlag.*;
import static com.oracle.graal.lir.sparc.SPARCMove.*;
import static jdk.internal.jvmci.code.ValueUtil.*;
import static jdk.internal.jvmci.sparc.SPARC.*;

import java.util.*;

import jdk.internal.jvmci.code.*;
import jdk.internal.jvmci.common.*;
import jdk.internal.jvmci.meta.*;
import jdk.internal.jvmci.sparc.*;
import jdk.internal.jvmci.sparc.SPARC.CPUFeature;

import com.oracle.graal.asm.*;
import com.oracle.graal.asm.Assembler.LabelHint;
import com.oracle.graal.asm.sparc.*;
import com.oracle.graal.asm.sparc.SPARCAssembler.BranchPredict;
import com.oracle.graal.asm.sparc.SPARCAssembler.CC;
import com.oracle.graal.asm.sparc.SPARCAssembler.ConditionFlag;
import com.oracle.graal.asm.sparc.SPARCMacroAssembler.ScratchRegister;
import com.oracle.graal.asm.sparc.SPARCMacroAssembler.Setx;
import com.oracle.graal.compiler.common.calc.*;
import com.oracle.graal.lir.*;
import com.oracle.graal.lir.SwitchStrategy.BaseSwitchClosure;
import com.oracle.graal.lir.asm.*;

/**
 * LIR operations implementing control transfer (return, conditional/unconditional branches,
 * compare-and-branch, switches and conditional moves) for the SPARC backend.
 */
public class SPARCControlFlow {
    // This describes the maximum offset between the first emitted instruction (load constant
    // into scratch, if it does not fit into simm5 of cbcond) and the final branch instruction.
    private static final int maximumSelfOffsetInstructions = 2;

    public static final class ReturnOp extends SPARCBlockEndOp {
        public static final LIRInstructionClass<ReturnOp> TYPE = LIRInstructionClass.create(ReturnOp.class);
        public static final SizeEstimate SIZE = SizeEstimate.create(2);

        @Use({REG, ILLEGAL}) protected Value x;

        public ReturnOp(Value x) {
            super(TYPE, SIZE);
            this.x = x;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, SPARCMacroAssembler masm) {
            emitCodeHelper(crb, masm);
        }

        public static void emitCodeHelper(CompilationResultBuilder crb, SPARCMacroAssembler masm) {
            masm.ret();
            // On SPARC we always leave the frame (in the delay slot).
            crb.frameContext.leave(crb);
        }
    }

    public static final class CompareBranchOp extends SPARCBlockEndOp implements SPARCDelayedControlTransfer {
        public static final LIRInstructionClass<CompareBranchOp> TYPE = LIRInstructionClass.create(CompareBranchOp.class);
        public static final SizeEstimate SIZE = SizeEstimate.create(3);
        static final EnumSet<Kind> SUPPORTED_KINDS = EnumSet.of(Kind.Long, Kind.Int, Kind.Object, Kind.Float, Kind.Double);

        private final SPARCCompare opcode;
        @Use({REG}) protected Value x;
        @Use({REG, CONST}) protected Value y;
        private ConditionFlag conditionFlag;
        protected final LabelRef trueDestination;
        protected LabelHint trueDestinationHint;
        protected final LabelRef falseDestination;
        protected LabelHint falseDestinationHint;
        protected final Kind kind;
        protected final boolean unorderedIsTrue;
        // Set once the compare/branch has been emitted (possibly early, via
        // emitControlTransfer, to make use of the delay slot).
        private boolean emitted = false;
        private int delaySlotPosition = -1;
        private double trueDestinationProbability;

        public CompareBranchOp(SPARCCompare opcode, Value x, Value y, Condition condition, LabelRef trueDestination, LabelRef falseDestination, Kind kind, boolean unorderedIsTrue,
                        double trueDestinationProbability) {
            super(TYPE, SIZE);
            this.opcode = opcode;
            this.x = x;
            this.y = y;
            this.trueDestination = trueDestination;
            this.falseDestination = falseDestination;
            this.kind = kind;
            this.unorderedIsTrue = unorderedIsTrue;
            this.trueDestinationProbability = trueDestinationProbability;
            CC conditionCodeReg = CC.forKind(kind);
            conditionFlag = fromCondition(conditionCodeReg, condition, unorderedIsTrue);
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, SPARCMacroAssembler masm) {
            if (emitted) { // Only if delayed control transfer is used we must check this
                assert masm.position() - delaySlotPosition == 4 : "Only one instruction can be stuffed into the delay slot";
            }
            if (!emitted) {
                requestHints(masm);
                int targetPosition = getTargetPosition(masm);
                if (canUseShortBranch(crb, masm, targetPosition)) {
                    emitted = emitShortCompareBranch(crb, masm);
                }
                if (!emitted) { // No short compare/branch was used, so we go into fallback
                    SPARCCompare.emit(crb, masm, opcode, x, y);
                    emitted = emitBranch(crb, masm, kind, conditionFlag, trueDestination, falseDestination, true, trueDestinationProbability);
                }
            }
            assert emitted;
        }

        private static int getTargetPosition(Assembler asm) {
            return asm.position() + maximumSelfOffsetInstructions * asm.target.wordSize;
        }

        public void emitControlTransfer(CompilationResultBuilder crb, SPARCMacroAssembler masm) {
            requestHints(masm);
            // When we use short branches, no delay slot is available
            int targetPosition = getTargetPosition(masm);
            if (!canUseShortBranch(crb, masm, targetPosition)) {
                SPARCCompare.emit(crb, masm, opcode, x, y);
                emitted = emitBranch(crb, masm, kind, conditionFlag, trueDestination, falseDestination, false, trueDestinationProbability);
                if (emitted) {
                    delaySlotPosition = masm.position();
                }
            }
        }

        private void requestHints(SPARCMacroAssembler masm) {
            if (trueDestinationHint == null) {
                this.trueDestinationHint = masm.requestLabelHint(trueDestination.label());
            }
            if (falseDestinationHint == null) {
                this.falseDestinationHint = masm.requestLabelHint(falseDestination.label());
            }
        }

        /**
         * Tries to emit the short compare/branch instruction.
         * <p>
         * CBcond has the following limitations
         * <ul>
         * <li>Immediate field is only 5 bit and is on the right
         * <li>Jump offset is maximum of -+512 instruction
         * </ul>
         * <p>
         * We get from outside
         * <ul>
         * <li>at least one of trueDestination falseDestination is within reach of +-512
         * instructions
         * <li>two registers OR one register and a constant which fits simm13
         * </ul>
         * <p>
         * We do:
         * <ul>
         * <li>find out which target needs to be branched conditionally
         * <li>find out if fall-through is possible, if not, an unconditional branch is needed
         * after cbcond (needJump=true)
         * <li>if no fall through: we need to put the closer jump into the cbcond branch and the
         * farther into the jmp (unconditional branch)
         * <li>if constant on the left side, mirror to be on the right
         * <li>if constant on right does not fit into simm5, put it into a scratch register
         * </ul>
         *
         * @param crb
         * @param masm
         * @return true if the branch could be emitted
         */
        private boolean emitShortCompareBranch(CompilationResultBuilder crb, SPARCMacroAssembler masm) {
            Value tmpValue;
            Value actualX = x;
            Value actualY = y;
            ConditionFlag actualConditionFlag = conditionFlag;
            Label actualTrueTarget = trueDestination.label();
            Label actualFalseTarget = falseDestination.label();
            Label tmpTarget;
            boolean needJump;
            if (crb.isSuccessorEdge(trueDestination)) {
                // True target is the fall-through; branch on the negated condition instead.
                actualConditionFlag = conditionFlag.negate();
                tmpTarget = actualTrueTarget;
                actualTrueTarget = actualFalseTarget;
                actualFalseTarget = tmpTarget;
                needJump = false;
            } else {
                needJump = !crb.isSuccessorEdge(falseDestination);
                int targetPosition = getTargetPosition(masm);
                if (needJump && !isShortBranch(masm, targetPosition, trueDestinationHint, actualTrueTarget)) {
                    // we have to jump in either way, so we must put the shorter
                    // branch into the actualTarget as only one of the two jump targets
                    // is guaranteed to be simm10
                    actualConditionFlag = actualConditionFlag.negate();
                    tmpTarget = actualTrueTarget;
                    actualTrueTarget = actualFalseTarget;
                    actualFalseTarget = tmpTarget;
                }
            }
            // Keep the constant on the right
            if (isConstant(actualX)) {
                tmpValue = actualX;
                actualX = actualY;
                actualY = tmpValue;
                actualConditionFlag = actualConditionFlag.mirror();
            }
            try (ScratchRegister scratch = masm.getScratchRegister()) {
                emitCBCond(masm, actualX, actualY, actualTrueTarget, actualConditionFlag);
            }
            if (needJump) {
                masm.jmp(actualFalseTarget);
                masm.nop();
            }
            return true;
        }

        private static void emitCBCond(SPARCMacroAssembler masm, Value actualX, Value actualY, Label actualTrueTarget, ConditionFlag conditionFlag) {
            switch ((Kind) actualX.getLIRKind().getPlatformKind()) {
                case Int:
                    if (isConstant(actualY)) {
                        int constantY = asConstant(actualY).asInt();
                        masm.cbcondw(conditionFlag, asIntReg(actualX), constantY, actualTrueTarget);
                    } else {
                        masm.cbcondw(conditionFlag, asIntReg(actualX), asIntReg(actualY), actualTrueTarget);
                    }
                    break;
                case Long:
                    if (isConstant(actualY)) {
                        int constantY = (int) asConstant(actualY).asLong();
                        masm.cbcondx(conditionFlag, asLongReg(actualX), constantY, actualTrueTarget);
                    } else {
                        masm.cbcondx(conditionFlag, asLongReg(actualX), asLongReg(actualY), actualTrueTarget);
                    }
                    break;
                case Object:
                    if (isConstant(actualY)) {
                        // The only valid object constant is null
                        assert asConstant(actualY).isNull();
                        masm.cbcondx(conditionFlag, asObjectReg(actualX), 0, actualTrueTarget);
                    } else { // this is already loaded
                        masm.cbcondx(conditionFlag, asObjectReg(actualX), asObjectReg(actualY), actualTrueTarget);
                    }
                    break;
                default:
                    JVMCIError.shouldNotReachHere();
            }
        }

        private boolean canUseShortBranch(CompilationResultBuilder crb, SPARCAssembler asm, int position) {
            if (!asm.hasFeature(CPUFeature.CBCOND)) {
                return false;
            }
            switch ((Kind) x.getPlatformKind()) {
                case Int:
                case Long:
                case Object:
                    break;
                default:
                    return false;
            }
            // Do not use short branch, if the y value is a constant and does not fit into simm5 but
            // fits into simm13; this means the code with CBcond would be longer as the code without
            // CBcond.
            if (isConstant(y) && !isSimm5(asConstant(y)) && isSimm13(asConstant(y))) {
                return false;
            }
            boolean hasShortJumpTarget = false;
            if (!crb.isSuccessorEdge(trueDestination)) {
                hasShortJumpTarget |= isShortBranch(asm, position, trueDestinationHint, trueDestination.label());
            }
            if (!crb.isSuccessorEdge(falseDestination)) {
                hasShortJumpTarget |= isShortBranch(asm, position, falseDestinationHint, falseDestination.label());
            }
            return hasShortJumpTarget;
        }

        public void resetState() {
            emitted = false;
            delaySlotPosition = -1;
        }

        @Override
        public void verify() {
            super.verify();
            assert SUPPORTED_KINDS.contains(kind) : kind;
            assert x.getKind().equals(kind) && y.getKind().equals(kind) : x + " " + y;
        }
    }

    private static boolean isShortBranch(SPARCAssembler asm, int position, LabelHint hint, Label label) {
        int disp = 0;
        boolean dispValid = true;
        if (label.isBound()) {
            disp = label.position() - position;
        } else if (hint != null && hint.isValid()) {
            disp = hint.getTarget() - hint.getPosition();
        } else {
            dispValid = false;
        }
        if (dispValid) {
            // Widen the displacement by the worst-case size of the instructions emitted before
            // the branch itself (see maximumSelfOffsetInstructions).
            if (disp < 0) {
                disp -= maximumSelfOffsetInstructions * asm.target.wordSize;
            } else {
                disp += maximumSelfOffsetInstructions * asm.target.wordSize;
            }
            return isSimm10(disp >> 2);
        } else if (hint == null) {
            asm.requestLabelHint(label);
        }
        return false;
    }

    public static final class BranchOp extends SPARCBlockEndOp implements StandardOp.BranchOp {
        public static final LIRInstructionClass<BranchOp> TYPE = LIRInstructionClass.create(BranchOp.class);
        public static final SizeEstimate SIZE = SizeEstimate.create(2);
        protected final ConditionFlag conditionFlag;
        protected final LabelRef trueDestination;
        protected final LabelRef falseDestination;
        protected final Kind kind;
        protected final double trueDestinationProbability;

        public BranchOp(ConditionFlag conditionFlag, LabelRef trueDestination, LabelRef falseDestination, Kind kind, double trueDestinationProbability) {
            super(TYPE, SIZE);
            this.trueDestination = trueDestination;
            this.falseDestination = falseDestination;
            this.kind = kind;
            this.conditionFlag = conditionFlag;
            this.trueDestinationProbability = trueDestinationProbability;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, SPARCMacroAssembler masm) {
            emitBranch(crb, masm, kind, conditionFlag, trueDestination, falseDestination, true, trueDestinationProbability);
        }

        @Override
        public void verify() {
            assert CompareBranchOp.SUPPORTED_KINDS.contains(kind);
        }
    }

    private static boolean emitBranch(CompilationResultBuilder crb, SPARCMacroAssembler masm, Kind kind, ConditionFlag conditionFlag, LabelRef trueDestination, LabelRef falseDestination,
                    boolean withDelayedNop, double trueDestinationProbability) {
        Label actualTarget;
        ConditionFlag actualConditionFlag;
        boolean needJump;
        BranchPredict predictTaken;
        if (falseDestination != null && crb.isSuccessorEdge(trueDestination)) {
            // Fall through on the true edge; branch (negated) to the false target.
            actualConditionFlag = conditionFlag != null ? conditionFlag.negate() : null;
            actualTarget = falseDestination.label();
            needJump = false;
            predictTaken = trueDestinationProbability < .5d ? PREDICT_TAKEN : PREDICT_NOT_TAKEN;
        } else {
            actualConditionFlag = conditionFlag;
            actualTarget = trueDestination.label();
            needJump = falseDestination != null && !crb.isSuccessorEdge(falseDestination);
            predictTaken = trueDestinationProbability > .5d ? PREDICT_TAKEN : PREDICT_NOT_TAKEN;
        }
        if (!withDelayedNop && needJump) {
            // We cannot make use of the delay slot when we jump in true-case and false-case
            return false;
        }
        if (kind == Kind.Double || kind == Kind.Float) {
            masm.fbpcc(actualConditionFlag, NOT_ANNUL, actualTarget, CC.Fcc0, predictTaken);
        } else {
            CC cc = kind == Kind.Int ? CC.Icc : CC.Xcc;
            masm.bpcc(actualConditionFlag, NOT_ANNUL, actualTarget, cc, predictTaken);
        }
        if (withDelayedNop) {
            masm.nop(); // delay slot
        }
        if (needJump) {
            masm.jmp(falseDestination.label());
        }
        return true;
    }

    public static final class StrategySwitchOp extends SPARCBlockEndOp {
        public static final LIRInstructionClass<StrategySwitchOp> TYPE = LIRInstructionClass.create(StrategySwitchOp.class);
        @Use({CONST}) protected JavaConstant[] keyConstants;
        private final LabelRef[] keyTargets;
        private LabelRef defaultTarget;
        @Alive({REG}) protected Value key;
        @Alive({REG, ILLEGAL}) protected Value constantTableBase;
        @Temp({REG}) protected Value scratch;
        private final SwitchStrategy strategy;
        private final Map<Label, LabelHint> labelHints;
        private final List<Label> conditionalLabels = new ArrayList<>();

        public StrategySwitchOp(Value constantTableBase, SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Value key, Value scratch) {
            super(TYPE);
            this.strategy = strategy;
            this.keyConstants = strategy.keyConstants;
            this.keyTargets = keyTargets;
            this.defaultTarget = defaultTarget;
            this.constantTableBase = constantTableBase;
            this.key = key;
            this.scratch = scratch;
            this.labelHints = new HashMap<>();
            assert keyConstants.length == keyTargets.length;
            assert keyConstants.length == strategy.keyProbabilities.length;
        }

        @Override
        public void emitCode(final CompilationResultBuilder crb, final SPARCMacroAssembler masm) {
            final Register keyRegister = asRegister(key);
            final Register constantBaseRegister = AllocatableValue.ILLEGAL.equals(constantTableBase) ? g0 : asRegister(constantTableBase);
            BaseSwitchClosure closure = new BaseSwitchClosure(crb, masm, keyTargets, defaultTarget) {
                int conditionalLabelPointer = 0;

                /**
                 * This method caches the generated labels over two assembly passes to get
                 * information about branch lengths.
                 */
                @Override
                public Label conditionalJump(int index, Condition condition) {
                    Label label;
                    // NOTE(review): with '<=' this condition is always true once the pointer is
                    // kept equal to the list size, so the reuse branch below appears
                    // unreachable — confirm whether '>=' was intended for cross-pass caching.
                    if (conditionalLabelPointer <= conditionalLabels.size()) {
                        label = new Label();
                        conditionalLabels.add(label);
                        conditionalLabelPointer = conditionalLabels.size();
                    } else {
                        // TODO: (sa) We rely here on the order how the labels are generated during
                        // code generation; if the order is not stable over two assembly passes, the
                        // result can be wrong
                        label = conditionalLabels.get(conditionalLabelPointer++);
                    }
                    conditionalJump(index, condition, label);
                    return label;
                }

                @Override
                protected void conditionalJump(int index, Condition condition, Label target) {
                    JavaConstant constant = keyConstants[index];
                    CC conditionCode;
                    Long bits;
                    switch (key.getKind()) {
                        case Char:
                        case Byte:
                        case Short:
                        case Int:
                            conditionCode = CC.Icc;
                            bits = constant.asLong();
                            break;
                        case Long: {
                            conditionCode = CC.Xcc;
                            bits = constant.asLong();
                            break;
                        }
                        case Object: {
                            conditionCode = crb.codeCache.getTarget().wordKind == Kind.Long ? CC.Xcc : CC.Icc;
                            bits = constant.isDefaultForKind() ? 0L : null;
                            break;
                        }
                        default:
                            throw new JVMCIError("switch only supported for int, long and object");
                    }
                    ConditionFlag conditionFlag = fromCondition(conditionCode, condition, false);
                    LabelHint hint = requestHint(masm, target);
                    boolean isShortConstant = isSimm5(constant);
                    int cbCondPosition = masm.position();
                    if (!isShortConstant) { // Load constant takes one instruction
                        cbCondPosition += SPARC.INSTRUCTION_SIZE;
                    }
                    boolean canUseShortBranch = masm.hasFeature(CPUFeature.CBCOND) && isShortBranch(masm, cbCondPosition, hint, target);
                    if (bits != null && canUseShortBranch) {
                        if (isShortConstant) {
                            if (conditionCode == Icc) {
                                masm.cbcondw(conditionFlag, keyRegister, (int) (long) bits, target);
                            } else {
                                masm.cbcondx(conditionFlag, keyRegister, (int) (long) bits, target);
                            }
                        } else {
                            Register scratchRegister = asRegister(scratch);
                            const2reg(crb, masm, scratch, constantBaseRegister, keyConstants[index], SPARCDelayedControlTransfer.DUMMY);
                            if (conditionCode == Icc) {
                                masm.cbcondw(conditionFlag, keyRegister, scratchRegister, target);
                            } else {
                                masm.cbcondx(conditionFlag, keyRegister, scratchRegister, target);
                            }
                        }
                    } else {
                        if (bits != null && isSimm13(constant)) {
                            masm.cmp(keyRegister, (int) (long) bits); // Cast is safe
                        } else {
                            Register scratchRegister = asRegister(scratch);
                            const2reg(crb, masm, scratch, constantBaseRegister, keyConstants[index], SPARCDelayedControlTransfer.DUMMY);
                            masm.cmp(keyRegister, scratchRegister);
                        }
                        masm.bpcc(conditionFlag, ANNUL, target, conditionCode, PREDICT_TAKEN);
                        masm.nop(); // delay slot
                    }
                }
            };
            strategy.run(closure);
        }

        private LabelHint requestHint(SPARCMacroAssembler masm, Label label) {
            LabelHint hint = labelHints.get(label);
            if (hint == null) {
                hint = masm.requestLabelHint(label);
                labelHints.put(label, hint);
            }
            return hint;
        }

        @Override
        public SizeEstimate estimateSize() {
            int constantBytes = 0;
            for (JavaConstant v : keyConstants) {
                if (!SPARCAssembler.isSimm13(v)) {
                    constantBytes += v.getKind().getByteCount();
                }
            }
            return new SizeEstimate(4 * keyTargets.length, constantBytes);
        }
    }

    public static final class TableSwitchOp extends SPARCBlockEndOp {
        public static final LIRInstructionClass<TableSwitchOp> TYPE = LIRInstructionClass.create(TableSwitchOp.class);

        private final int lowKey;
        private final LabelRef defaultTarget;
        private final LabelRef[] targets;
        @Alive protected Value index;
        @Temp protected Value scratch;

        public TableSwitchOp(final int lowKey, final LabelRef defaultTarget, final LabelRef[] targets, Variable index, Variable scratch) {
            super(TYPE);
            this.lowKey = lowKey;
            this.defaultTarget = defaultTarget;
            this.targets = targets;
            this.index = index;
            this.scratch = scratch;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, SPARCMacroAssembler masm) {
            Register value = asIntReg(index);
            Register scratchReg = asLongReg(scratch);

            // Compare index against jump table bounds
            int highKey = lowKey + targets.length - 1;

            // subtract the low value from the switch value
            if (isSimm13(lowKey)) {
                masm.sub(value, lowKey, scratchReg);
            } else {
                try (ScratchRegister sc = masm.getScratchRegister()) {
                    Register scratch2 = sc.getRegister();
                    new Setx(lowKey, scratch2).emit(masm);
                    masm.sub(value, scratch2, scratchReg);
                }
            }
            int upperLimit = highKey - lowKey;
            try (ScratchRegister sc = masm.getScratchRegister()) {
                Register scratch2 = sc.getRegister();
                if (isSimm13(upperLimit)) {
                    masm.cmp(scratchReg, upperLimit);
                } else {
                    // upperLimit does not fit into a simm13 immediate, so it has been
                    // materialized into scratch2 and must be compared from there.
                    new Setx(upperLimit, scratch2).emit(masm);
                    masm.cmp(scratchReg, scratch2);
                }

                // Jump to default target if index is not within the jump table
                if (defaultTarget != null) {
                    masm.bpcc(GreaterUnsigned, NOT_ANNUL, defaultTarget.label(), Icc, PREDICT_TAKEN);
                    masm.nop(); // delay slot
                }

                // Load jump table entry into scratch and jump to it
                masm.sll(scratchReg, 3, scratchReg); // Multiply by 8 (each entry is 2 instructions)
                // Zero the upper 32 bits: sll with shcnt>0 does not mask them
                masm.srl(scratchReg, 0, scratchReg);
                masm.rdpc(scratch2);

                // The jump table follows four instructions after rdpc
                masm.add(scratchReg, 4 * 4, scratchReg);
                masm.jmpl(scratch2, scratchReg, g0);
            }
            masm.nop();

            // Emit jump table entries
            for (LabelRef target : targets) {
                masm.bpcc(Always, NOT_ANNUL, target.label(), Xcc, PREDICT_TAKEN);
                masm.nop(); // delay slot
            }
        }

        @Override
        public SizeEstimate estimateSize() {
            return SizeEstimate.create(17 + targets.length * 2);
        }
    }

    @Opcode("CMOVE")
    public static final class CondMoveOp extends SPARCLIRInstruction {
        public static final LIRInstructionClass<CondMoveOp> TYPE = LIRInstructionClass.create(CondMoveOp.class);

        @Def({REG, HINT}) protected Value result;
        @Use({REG, CONST}) protected Value trueValue;
        @Use({REG, CONST}) protected Value falseValue;

        private final ConditionFlag condition;
        private final CC cc;

        public CondMoveOp(Variable result, CC cc, ConditionFlag condition, Value trueValue, Value falseValue) {
            super(TYPE);
            this.result = result;
            this.condition = condition;
            this.trueValue = trueValue;
            this.falseValue = falseValue;
            this.cc = cc;
        }

        @Override
        public void emitCode(CompilationResultBuilder crb, SPARCMacroAssembler masm) {
            if (result.equals(trueValue)) { // We have the true value in place, do the opposite
                cmove(masm, cc, result, condition.negate(), falseValue);
            } else if (result.equals(falseValue)) {
                cmove(masm, cc, result, condition, trueValue);
            } else { // We have to move one of the input values to the result
                ConditionFlag actualCondition = condition;
                Value actualTrueValue = trueValue;
                Value actualFalseValue = falseValue;
                // Prefer having the simm11-encodable constant in the conditional move.
                if (isConstant(falseValue) && isSimm11(asConstant(falseValue))) {
                    actualCondition = condition.negate();
                    actualTrueValue = falseValue;
                    actualFalseValue = trueValue;
                }
                SPARCMove.move(crb, masm, result, actualFalseValue, SPARCDelayedControlTransfer.DUMMY);
                cmove(masm, cc, result, actualCondition, actualTrueValue);
            }
        }

        @Override
        public SizeEstimate estimateSize() {
            int constantSize = 0;
            if (isConstant(trueValue) && !SPARCAssembler.isSimm13(asConstant(trueValue))) {
                constantSize += trueValue.getKind().getByteCount();
            }
            if (isConstant(falseValue) && !SPARCAssembler.isSimm13(asConstant(falseValue))) {
                // Account for the false value's own constant size (was erroneously trueValue).
                constantSize += falseValue.getKind().getByteCount();
            }
            return SizeEstimate.create(3, constantSize);
        }
    }

    private static void cmove(SPARCMacroAssembler masm, CC cc, Value result, ConditionFlag cond, Value other) {
        switch (other.getKind()) {
            case Boolean:
            case Byte:
            case Short:
            case Char:
            case Int:
                if (isConstant(other)) {
                    int constant;
                    if (asConstant(other).isNull()) {
                        constant = 0;
                    } else {
                        constant = asConstant(other).asInt();
                    }
                    masm.movcc(cond, cc, constant, asRegister(result));
                } else {
                    masm.movcc(cond, cc, asRegister(other), asRegister(result));
                }
                break;
            case Long:
            case Object:
                if (isConstant(other)) {
                    long constant;
                    if (asConstant(other).isNull()) {
                        constant = 0;
                    } else {
                        constant = asConstant(other).asLong();
                    }
                    masm.movcc(cond, cc, (int) constant, asRegister(result));
                } else {
                    masm.movcc(cond, cc, asRegister(other), asRegister(result));
                }
                break;
            case Float:
                masm.fmovscc(cond, cc, asFloatReg(other), asFloatReg(result));
                break;
            case Double:
                masm.fmovdcc(cond, cc, asDoubleReg(other), asDoubleReg(result));
                break;
            default:
                throw JVMCIError.shouldNotReachHere();
        }
    }

    /**
     * Maps a platform-independent {@link Condition} to the SPARC {@link ConditionFlag} for the
     * given condition-code register (integer {@code Icc}/{@code Xcc} or float {@code FccN}).
     *
     * @param unorderedIsTrue for float condition codes, whether an unordered comparison result
     *            should satisfy the condition
     */
    public static ConditionFlag fromCondition(CC conditionFlagsRegister, Condition cond, boolean unorderedIsTrue) {
        switch (conditionFlagsRegister) {
            case Xcc:
            case Icc:
                switch (cond) {
                    case EQ:
                        return Equal;
                    case NE:
                        return NotEqual;
                    case BT:
                        return LessUnsigned;
                    case LT:
                        return Less;
                    case BE:
                        return LessEqualUnsigned;
                    case LE:
                        return LessEqual;
                    case AE:
                        return GreaterEqualUnsigned;
                    case GE:
                        return GreaterEqual;
                    case AT:
                        return GreaterUnsigned;
                    case GT:
                        return Greater;
                }
                throw JVMCIError.shouldNotReachHere("Unimplemented for: " + cond);
            case Fcc0:
            case Fcc1:
            case Fcc2:
            case Fcc3:
                switch (cond) {
                    case EQ:
                        return unorderedIsTrue ? F_UnorderedOrEqual : F_Equal;
                    case NE:
                        return ConditionFlag.F_NotEqual;
                    case LT:
                        return unorderedIsTrue ? F_UnorderedOrLess : F_Less;
                    case LE:
                        return unorderedIsTrue ? F_UnorderedOrLessOrEqual : F_LessOrEqual;
                    case GE:
                        return unorderedIsTrue ? F_UnorderedGreaterOrEqual : F_GreaterOrEqual;
                    case GT:
                        return unorderedIsTrue ? F_UnorderedOrGreater : F_Greater;
                }
                throw JVMCIError.shouldNotReachHere("Unkown condition: " + cond);
        }
        throw JVMCIError.shouldNotReachHere("Unknown condition flag register " + conditionFlagsRegister);
    }
}