comparison graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotLIRGenerator.java @ 13520:fb7b39f07232

Embed compressed constants when possible and use more efficient patterns for encoding
author Tom Rodriguez <tom.rodriguez@oracle.com>
date Mon, 06 Jan 2014 17:19:18 -0800
parents 6dd9a1455e64
children da9d9823628f
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotLIRGenerator.java (13519:1ceb90be7bac)
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotLIRGenerator.java (13520:fb7b39f07232)
@@ -38,10 +38,11 @@
 import com.oracle.graal.compiler.amd64.*;
 import com.oracle.graal.compiler.gen.*;
 import com.oracle.graal.debug.*;
 import com.oracle.graal.graph.*;
 import com.oracle.graal.hotspot.*;
+import com.oracle.graal.hotspot.HotSpotVMConfig.CompressEncoding;
 import com.oracle.graal.hotspot.amd64.AMD64HotSpotMove.CompareAndSwapCompressedOp;
 import com.oracle.graal.hotspot.amd64.AMD64HotSpotMove.LoadCompressedPointer;
 import com.oracle.graal.hotspot.amd64.AMD64HotSpotMove.StoreCompressedConstantOp;
 import com.oracle.graal.hotspot.amd64.AMD64HotSpotMove.StoreCompressedPointer;
 import com.oracle.graal.hotspot.meta.*;
@@ -444,14 +445,27 @@
             op.savedRbp = savedRbp;
         }
     }
 
     /**
-     * Returns whether or not the input access is a (de)compression candidate.
+     * Returns whether or not the input access should be (de)compressed.
      */
-    private static boolean isCompressCandidate(Access access) {
-        return access != null && access.isCompressible();
+    private boolean isCompressedOperation(Kind kind, Access access) {
+        return access != null && access.isCompressible() && ((kind == Kind.Long && config.useCompressedClassPointers) || (kind == Kind.Object && config.useCompressedOops));
+    }
+
+    /**
+     * @return a compressed version of the incoming constant
+     */
+    protected static Constant compress(Constant c, CompressEncoding encoding) {
+        if (c.getKind() == Kind.Long) {
+            return Constant.forIntegerKind(Kind.Int, (int) (((c.asLong() - encoding.base) >> encoding.shift) & 0xffffffffL), c.getPrimitiveAnnotation());
+        } else if (c.getKind() == Kind.Object) {
+            return Constant.forIntegerKind(Kind.Int, 0xdeaddead, c.asObject());
+        } else {
+            throw GraalInternalError.shouldNotReachHere();
+        }
     }
 
     @Override
     public Variable emitLoad(Kind kind, Value address, Access access) {
         AMD64AddressValue loadAddress = asAddressValue(address);
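Note: the new compress() helper is the heart of the change. Rebased against the encoding base and shifted by the alignment, a 64-bit klass pointer fits in 32 bits and can be embedded directly in the instruction stream. Below is a minimal standalone sketch of the Kind.Long branch's arithmetic; the base, shift, and pointer values are hypothetical stand-ins for the real HotSpotVMConfig.CompressEncoding fields.

// Minimal sketch of the Kind.Long branch of compress() above. The values
// in main() are hypothetical; the real ones come from CompressEncoding.
public class CompressSketch {

    static int compress(long raw, long base, int shift) {
        // Same expression as compress() above: rebase, then shift out the
        // alignment bits so the result fits in 32 bits.
        return (int) (((raw - base) >> shift) & 0xffffffffL);
    }

    public static void main(String[] args) {
        long base = 0x800000000L;  // hypothetical narrow-klass base
        int shift = 3;             // hypothetical narrow-klass shift
        long klass = 0x800001238L; // hypothetical 8-byte-aligned klass pointer
        System.out.printf("0x%x -> 0x%x%n", klass, compress(klass, base, shift));
        // prints: 0x800001238 -> 0x247
    }
}

The Kind.Object branch cannot know the narrow bits at compile time, so it emits the placeholder 0xdeaddead and attaches the object itself as the constant's annotation, presumably so the real encoded oop can be patched in once the object's address is fixed.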
@@ -463,24 +477,20 @@
         /**
          * Currently, the (de)compression of pointers applies conditionally to some objects (oops,
          * kind==Object) and some addresses (klass pointers, kind==Long). Initially, the input
          * operation is checked to discover if it has been tagged as a potential "compression"
          * candidate. Consequently, depending on the appropriate kind, the specific (de)compression
-         * functions are being called. Although, currently, the compression and decompression
-         * algorithms of oops and klass pointers are identical, in hotspot, they are implemented as
-         * separate methods. That means that in the future there might be the case where the
-         * algorithms may differ.
+         * functions are being called.
          */
-        if (isCompressCandidate(access)) {
-            if (config.useCompressedOops && kind == Kind.Object) {
-                append(new LoadCompressedPointer(kind, result, getProviders().getRegisters().getHeapBaseRegister().asValue(), loadAddress, state, getNarrowKlassBase(), getNarrowOopBase(),
-                                getNarrowOopShift(), getLogMinObjectAlignment()));
-            } else if (config.useCompressedClassPointers && kind == Kind.Long) {
-                append(new LoadCompressedPointer(kind, result, getProviders().getRegisters().getHeapBaseRegister().asValue(), loadAddress, state, getNarrowKlassBase(), getNarrowOopBase(),
-                                getNarrowKlassShift(), getLogKlassAlignment()));
+        if (isCompressedOperation(kind, access)) {
+            if (kind == Kind.Object) {
+                append(new LoadCompressedPointer(kind, result, getProviders().getRegisters().getHeapBaseRegister().asValue(), loadAddress, state, config.getOopEncoding()));
+            } else if (kind == Kind.Long) {
+                Variable scratch = config.getKlassEncoding().base != 0 ? newVariable(Kind.Long) : null;
+                append(new LoadCompressedPointer(kind, result, scratch, loadAddress, state, config.getKlassEncoding()));
             } else {
-                append(new LoadOp(kind, result, loadAddress, state));
+                throw GraalInternalError.shouldNotReachHere("can't handle: " + access);
             }
         } else {
             append(new LoadOp(kind, result, loadAddress, state));
         }
         return result;
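One detail worth noting in the load hunk: an oop load decompresses against the permanently reserved heap base register, while a klass load now allocates a scratch register only when the klass encoding's base is non-zero and passes null otherwise. The sketch below (my illustration, not Graal's actual LIR emitter) shows why a zero base needs no extra register.

class DecodeSketch {
    // The decode a compressed load implies. With base == 0 it collapses to
    // a single shift, so no register is needed to hold the base; that is
    // the case where the code above passes scratch == null.
    static long decode(int narrow, long base, int shift) {
        long u = narrow & 0xffffffffL;          // zero-extend the 32 narrow bits
        return base == 0 ? u << shift           // one SHL, no scratch needed
                         : (u << shift) + base; // base must be materialized in a register
    }
}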
@@ -491,68 +501,49 @@
         AMD64AddressValue storeAddress = asAddressValue(address);
         LIRFrameState state = null;
         if (access instanceof DeoptimizingNode) {
             state = state((DeoptimizingNode) access);
         }
+        boolean isCompressed = isCompressedOperation(kind, access);
         if (isConstant(inputVal)) {
             Constant c = asConstant(inputVal);
-            if (canStoreConstant(c)) {
-                if (inputVal.getKind() == Kind.Object && config.useCompressedOops && isCompressCandidate(access)) {
-                    append(new StoreCompressedConstantOp(kind, storeAddress, c, state));
-                } else if (inputVal.getKind() == Kind.Long && config.useCompressedClassPointers && isCompressCandidate(access)) {
-                    append(new StoreCompressedConstantOp(kind, storeAddress, c, state));
+            if (isCompressed && canStoreConstant(c, isCompressed)) {
+                if (c.getKind() == Kind.Object) {
+                    Constant value = c.isNull() ? c : compress(c, config.getOopEncoding());
+                    append(new StoreCompressedConstantOp(kind, storeAddress, value, state));
+                } else if (c.getKind() == Kind.Long) {
+                    // It's always a good idea to directly store compressed constants since they
+                    // have to be materialized as 64 bits encoded otherwise.
+                    Constant value = compress(c, config.getKlassEncoding());
+                    append(new StoreCompressedConstantOp(kind, storeAddress, value, state));
                 } else {
-                    append(new StoreConstantOp(kind, storeAddress, c, state));
+                    throw GraalInternalError.shouldNotReachHere("can't handle: " + access);
                 }
                 return;
             }
         }
         Variable input = load(inputVal);
-        if (isCompressCandidate(access)) {
-            if (config.useCompressedOops && kind == Kind.Object) {
+        if (isCompressed) {
+            if (kind == Kind.Object) {
                 if (input.getKind() == Kind.Object) {
                     Variable scratch = newVariable(Kind.Long);
                     Register heapBaseReg = getProviders().getRegisters().getHeapBaseRegister();
-                    append(new StoreCompressedPointer(kind, storeAddress, input, scratch, state, getNarrowKlassBase(), getNarrowOopBase(), getNarrowOopShift(), getLogMinObjectAlignment(), heapBaseReg));
+                    append(new StoreCompressedPointer(kind, storeAddress, input, scratch, state, config.getOopEncoding(), heapBaseReg));
                 } else {
                     // the input oop is already compressed
                     append(new StoreOp(input.getKind(), storeAddress, input, state));
                 }
-            } else if (config.useCompressedClassPointers && kind == Kind.Long) {
+            } else if (kind == Kind.Long) {
                 Variable scratch = newVariable(Kind.Long);
                 Register heapBaseReg = getProviders().getRegisters().getHeapBaseRegister();
-                append(new StoreCompressedPointer(kind, storeAddress, input, scratch, state, getNarrowKlassBase(), getNarrowOopBase(), getNarrowKlassShift(), getLogKlassAlignment(), heapBaseReg));
+                append(new StoreCompressedPointer(kind, storeAddress, input, scratch, state, config.getKlassEncoding(), heapBaseReg));
             } else {
                 append(new StoreOp(kind, storeAddress, input, state));
             }
         } else {
             append(new StoreOp(kind, storeAddress, input, state));
         }
     }
-
-    private int getLogMinObjectAlignment() {
-        return config.logMinObjAlignment();
-    }
-
-    private int getNarrowOopShift() {
-        return config.narrowOopShift;
-    }
-
-    private long getNarrowOopBase() {
-        return config.narrowOopBase;
-    }
-
-    private int getLogKlassAlignment() {
-        return config.logKlassAlignment;
-    }
-
-    private int getNarrowKlassShift() {
-        return config.narrowKlassShift;
-    }
-
-    private long getNarrowKlassBase() {
-        return config.narrowKlassBase;
     }
 
     @Override
     public void visitCompareAndSwap(LoweredCompareAndSwapNode node, Value address) {
         Kind kind = node.getNewValue().kind();
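The comment kept in the Kind.Long constant branch states the rationale behind the commit message's "embed compressed constants": an x86-64 store accepts at most a sign-extended 32-bit immediate, so an uncompressed 64-bit pointer must first be materialized in a register before it can be stored, while the compressed form goes straight into the instruction as an immediate. Below is a hedged sketch of the range check such a decision rests on; the actual gate here is canStoreConstant(c, isCompressed), implemented elsewhere in the generator.

class StoreImmediateSketch {
    // Hypothetical helper, not the real canStoreConstant: a value can be
    // embedded as a store immediate only if it survives x86-64's
    // sign-extending truncation from 32 bits.
    static boolean fitsInStoreImmediate(long value) {
        return value == (int) value;
    }
}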
@@ -563,11 +554,11 @@
         RegisterValue raxRes = AMD64.rax.asValue(kind);
         emitMove(raxRes, expected);
         if (config.useCompressedOops && node.isCompressible()) {
             Variable scratch = newVariable(Kind.Long);
             Register heapBaseReg = getProviders().getRegisters().getHeapBaseRegister();
-            append(new CompareAndSwapCompressedOp(raxRes, addressValue, raxRes, newValue, scratch, getNarrowOopBase(), getNarrowOopShift(), getLogMinObjectAlignment(), heapBaseReg));
+            append(new CompareAndSwapCompressedOp(raxRes, addressValue, raxRes, newValue, scratch, config.getOopEncoding(), heapBaseReg));
         } else {
             append(new CompareAndSwapOp(raxRes, addressValue, raxRes, newValue));
         }
         Variable result = newVariable(node.kind());
         append(new CondMoveOp(result, Condition.EQ, load(Constant.TRUE), Constant.FALSE));