comparison src/share/vm/opto/graphKit.cpp @ 3255:5d046bf49ce7

Merge
author:   johnc
date:     Thu, 14 Apr 2011 13:45:41 -0700
parents:  6c97c830fb6f 92add02409c9
children: 286c498ae0d4
comparing 2468:6c97c830fb6f (parent) with 3255:5d046bf49ce7
@@ -1455,23 +1455,26 @@
 
   return st;
 }
 
 
-void GraphKit::pre_barrier(Node* ctl,
+void GraphKit::pre_barrier(bool do_load,
+                           Node* ctl,
                            Node* obj,
                            Node* adr,
                            uint adr_idx,
                            Node* val,
                            const TypeOopPtr* val_type,
+                           Node* pre_val,
                            BasicType bt) {
+
   BarrierSet* bs = Universe::heap()->barrier_set();
   set_control(ctl);
   switch (bs->kind()) {
     case BarrierSet::G1SATBCT:
     case BarrierSet::G1SATBCTLogging:
-      g1_write_barrier_pre(obj, adr, adr_idx, val, val_type, bt);
+      g1_write_barrier_pre(do_load, obj, adr, adr_idx, val, val_type, pre_val, bt);
       break;
 
     case BarrierSet::CardTableModRef:
     case BarrierSet::CardTableExtension:
     case BarrierSet::ModRef:
@@ -1530,11 +1533,15 @@
   assert(bt == T_OBJECT, "sanity");
   assert(val != NULL, "not dead path");
   uint adr_idx = C->get_alias_index(adr_type);
   assert(adr_idx != Compile::AliasIdxTop, "use other store_to_memory factory" );
 
-  pre_barrier(control(), obj, adr, adr_idx, val, val_type, bt);
+  pre_barrier(true /* do_load */,
+              control(), obj, adr, adr_idx, val, val_type,
+              NULL /* pre_val */,
+              bt);
+
   Node* store = store_to_memory(control(), adr, val, bt, adr_idx);
   post_barrier(control(), store, obj, adr, adr_idx, val, bt, use_precise);
   return store;
 }
 
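
Editor's note: taken together with the assertion block added to g1_write_barrier_pre below, the new do_load/pre_val pair gives pre_barrier two calling shapes: either the barrier emits the load of the previous field value itself, or the caller hands it a value it has already loaded. A minimal sketch of both shapes as they would appear inside a GraphKit member; only the first shape occurs in this file, the second caller is hypothetical:

  // Shape 1: the barrier performs the load of the old value
  // (this is the store_oop call shown in the hunk above).
  pre_barrier(true /* do_load */,
              control(), obj, adr, adr_idx, val, val_type,
              NULL /* pre_val */,
              T_OBJECT);

  // Shape 2 (illustrative only): the caller has already loaded the value to be
  // logged, so the load-related arguments are unused (see the asserts added to
  // g1_write_barrier_pre below).
  pre_barrier(false /* do_load */,
              control(), NULL /* obj */, NULL /* adr */, 0 /* adr_idx, unused */,
              NULL /* val */, NULL /* val_type */,
              pre_val,
              T_OBJECT);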
@@ -3468,16 +3475,35 @@
   // Final sync IdealKit and GraphKit.
   final_sync(ideal);
 }
 
 // G1 pre/post barriers
-void GraphKit::g1_write_barrier_pre(Node* obj,
+void GraphKit::g1_write_barrier_pre(bool do_load,
+                                    Node* obj,
                                     Node* adr,
                                     uint alias_idx,
                                     Node* val,
                                     const TypeOopPtr* val_type,
+                                    Node* pre_val,
                                     BasicType bt) {
+
+  // Some sanity checks
+  // Note: val is unused in this routine.
+
+  if (do_load) {
+    // We need to generate the load of the previous value
+    assert(obj != NULL, "must have a base");
+    assert(adr != NULL, "where are loading from?");
+    assert(pre_val == NULL, "loaded already?");
+    assert(val_type != NULL, "need a type");
+  } else {
+    // In this case both val_type and alias_idx are unused.
+    assert(pre_val != NULL, "must be loaded already");
+    assert(pre_val->bottom_type()->basic_type() == T_OBJECT, "or we shouldn't be here");
+  }
+  assert(bt == T_OBJECT, "or we shouldn't be here");
+
   IdealKit ideal(this, true);
 
   Node* tls = __ thread(); // ThreadLocalStorage
 
   Node* no_ctrl = NULL;
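
Editor's note: for orientation, the three offsets computed at the top of the next hunk address fields of the thread-local SATB mark queue. A conceptual sketch of that layout, inferred from the byte_offset_of_active/index/buf accessors rather than quoted from ptrQueue.hpp (field names are assumptions):

  // JavaThread
  //   satb_mark_queue (a PtrQueue)
  //     _active : non-zero only while SATB marking is in progress ("marking" below)
  //     _index  : byte index of the next free buffer slot; counts down, 0 means full
  //     _buf    : base address of the current per-thread log buffer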
@@ -3495,36 +3521,32 @@
                                       PtrQueue::byte_offset_of_active());
   const int index_offset   = in_bytes(JavaThread::satb_mark_queue_offset() +  // 656
                                       PtrQueue::byte_offset_of_index());
   const int buffer_offset  = in_bytes(JavaThread::satb_mark_queue_offset() +  // 652
                                       PtrQueue::byte_offset_of_buf());
+
   // Now the actual pointers into the thread
-
-  // set_control( ctl);
-
   Node* marking_adr = __ AddP(no_base, tls, __ ConX(marking_offset));
   Node* buffer_adr  = __ AddP(no_base, tls, __ ConX(buffer_offset));
   Node* index_adr   = __ AddP(no_base, tls, __ ConX(index_offset));
 
   // Now some of the values
-
   Node* marking = __ load(__ ctrl(), marking_adr, TypeInt::INT, active_type, Compile::AliasIdxRaw);
 
   // if (!marking)
   __ if_then(marking, BoolTest::ne, zero); {
     Node* index   = __ load(__ ctrl(), index_adr, TypeInt::INT, T_INT, Compile::AliasIdxRaw);
 
-    const Type* t1 = adr->bottom_type();
-    const Type* t2 = val->bottom_type();
-
-    Node* orig = __ load(no_ctrl, adr, val_type, bt, alias_idx);
-    // if (orig != NULL)
-    __ if_then(orig, BoolTest::ne, null()); {
-      Node* buffer  = __ load(__ ctrl(), buffer_adr, TypeRawPtr::NOTNULL, T_ADDRESS, Compile::AliasIdxRaw);
-
+    if (do_load) {
       // load original value
       // alias_idx correct??
+      pre_val = __ load(no_ctrl, adr, val_type, bt, alias_idx);
+    }
+
+    // if (pre_val != NULL)
+    __ if_then(pre_val, BoolTest::ne, null()); {
+      Node* buffer  = __ load(__ ctrl(), buffer_adr, TypeRawPtr::NOTNULL, T_ADDRESS, Compile::AliasIdxRaw);
 
       // is the queue for this thread full?
       __ if_then(index, BoolTest::ne, zero, likely); {
 
         // decrement the index
@@ -3534,24 +3556,23 @@
         // We could refine the type for what it's worth
         // const TypeLong* lidxtype = TypeLong::make(CONST64(0), get_size_from_queue);
         next_indexX = _gvn.transform( new (C, 2) ConvI2LNode(next_index, TypeLong::make(0, max_jlong, Type::WidenMax)) );
 #endif
 
-        // Now get the buffer location we will log the original value into and store it
+        // Now get the buffer location we will log the previous value into and store it
         Node *log_addr = __ AddP(no_base, buffer, next_indexX);
-        __ store(__ ctrl(), log_addr, orig, T_OBJECT, Compile::AliasIdxRaw);
-
+        __ store(__ ctrl(), log_addr, pre_val, T_OBJECT, Compile::AliasIdxRaw);
         // update the index
         __ store(__ ctrl(), index_adr, next_index, T_INT, Compile::AliasIdxRaw);
 
       } __ else_(); {
 
         // logging buffer is full, call the runtime
         const TypeFunc *tf = OptoRuntime::g1_wb_pre_Type();
-        __ make_leaf_call(tf, CAST_FROM_FN_PTR(address, SharedRuntime::g1_wb_pre), "g1_wb_pre", orig, tls);
+        __ make_leaf_call(tf, CAST_FROM_FN_PTR(address, SharedRuntime::g1_wb_pre), "g1_wb_pre", pre_val, tls);
       } __ end_if();  // (!index)
-    } __ end_if();  // (orig != NULL)
+    } __ end_if();  // (pre_val != NULL)
   } __ end_if();  // (!marking)
 
   // Final sync IdealKit and GraphKit.
   final_sync(ideal);
 }
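
Editor's note: pulling the last hunk together, the IR that g1_write_barrier_pre now builds corresponds roughly to the pseudo-C below. This is a sketch of the emitted control flow, not literal HotSpot code; the index-decrement arithmetic sits in lines elided from this comparison.

  if (marking != 0) {                                    // SATB marking active for this thread
    oop previous = do_load ? *(oop*)adr : pre_val;       // the value about to be overwritten
    if (previous != NULL) {
      if (index != 0) {                                  // room left in the log buffer
        // next_index = index minus one slot (exact arithmetic elided above)
        *(oop*)(buffer + next_index) = previous;         // log the previous value
        // write next_index back to the thread-local index field
      } else {
        SharedRuntime::g1_wb_pre(previous, thread);      // buffer full: call the runtime slow path
      }
    }
  }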