comparison src/share/vm/opto/callnode.cpp @ 10278:6f3fd5150b67

6934604: enable parts of EliminateAutoBox by default
Summary: Resurrected autobox elimination code and enabled part of it by default.
Reviewed-by: roland, twisti
author kvn
date Wed, 08 May 2013 15:08:01 -0700
parents a7114d3d712e
children 766fac3395d6
--- a/src/share/vm/opto/callnode.cpp  10277:aabf54ccedb1
+++ b/src/share/vm/opto/callnode.cpp  10278:6f3fd5150b67
@@ -521,20 +521,25 @@
   if (caller() != NULL) caller()->dump_spec(st);
 }


 void JVMState::dump_on(outputStream* st) const {
-  if (_map && !((uintptr_t)_map & 1)) {
+  bool print_map = _map && !((uintptr_t)_map & 1) &&
+                   ((caller() == NULL) || (caller()->map() != _map));
+  if (print_map) {
     if (_map->len() > _map->req()) {  // _map->has_exceptions()
       Node* ex = _map->in(_map->req());  // _map->next_exception()
       // skip the first one; it's already being printed
       while (ex != NULL && ex->len() > ex->req()) {
         ex = ex->in(ex->req());  // ex->next_exception()
         ex->dump(1);
       }
     }
-    _map->dump(2);
+    _map->dump(Verbose ? 2 : 1);
+  }
+  if (caller() != NULL) {
+    caller()->dump_on(st);
   }
   st->print("JVMS depth=%d loc=%d stk=%d arg=%d mon=%d scalar=%d end=%d mondepth=%d sp=%d bci=%d reexecute=%s method=",
             depth(), locoff(), stkoff(), argoff(), monoff(), scloff(), endoff(), monitor_depth(), sp(), bci(), should_reexecute()?"true":"false");
   if (_method == NULL) {
     st->print_cr("(none)");
@@ -543,13 +548,10 @@
     st->cr();
     if (bci() >= 0 && bci() < _method->code_size()) {
       st->print("    bc: ");
       _method->print_codes_on(bci(), bci()+1, st);
     }
-  }
-  if (caller() != NULL) {
-    caller()->dump_on(st);
   }
 }

 // Extra way to dump a jvms from the debugger,
 // to avoid a bug with C++ member function calls.
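
Two behavioral changes in the rewritten dump_on() are easy to miss in the diff: the shared map is now dumped at most once per chain (a state whose caller shares the same map leaves the dump to that caller), and the caller chain is printed by recursing before the current frame's JVMS line, so output now reads from the outermost caller down to the innermost frame. A minimal standalone sketch of the same pattern, with an invented Frame type standing in for HotSpot's JVMState:

// A minimal standalone sketch (invented Frame type, not HotSpot's JVMState)
// of the new printing pattern: states of one chain share a single map, the
// map is dumped once by the outermost sharer, and callers print before the
// current frame.
#include <cstdio>

struct Frame {
  int         depth;
  const char* map;     // stands in for the shared SafePointNode*
  Frame*      caller;  // nullptr for the outermost frame

  void dump_on() const {
    // Leave the dump to the caller if it shares the same map (print_map).
    bool print_map = (map != nullptr) &&
                     ((caller == nullptr) || (caller->map != map));
    if (print_map) {
      std::printf("map: %s\n", map);
    }
    if (caller != nullptr) {
      caller->dump_on();  // recurse first: outermost caller prints first
    }
    std::printf("JVMS depth=%d\n", depth);
  }
};

int main() {
  const char* shared = "shared-map";
  Frame outer = { 1, shared, nullptr };
  Frame inner = { 2, shared, &outer };
  inner.dump_on();  // map: shared-map, JVMS depth=1, JVMS depth=2
  return 0;
}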
@@ -582,10 +584,19 @@
   assert(n->depth() == depth(), "sanity");
   assert(n->debug_depth() == debug_depth(), "sanity");
   return n;
 }

+/**
+ * Reset map for all callers
+ */
+void JVMState::set_map_deep(SafePointNode* map) {
+  for (JVMState* p = this; p->_caller != NULL; p = p->_caller) {
+    p->set_map(map);
+  }
+}
+
 //=============================================================================
 uint CallNode::cmp( const Node &n ) const
 { return _tf == ((CallNode&)n)._tf && _jvms == ((CallNode&)n)._jvms; }
 #ifndef PRODUCT
 void CallNode::dump_req(outputStream *st) const {
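
set_map_deep() is new plumbing for the resurrected autobox elimination: when a SafePointNode is replaced, the JVMStates on the inlining chain that still point at the old node have to be redirected, because caller states share the callee's map. A standalone sketch of the walk, with an invented State type in place of JVMState:

// A standalone sketch (invented State type, not JVMState) of the walk:
// states on the chain are redirected to the replacement map by following
// caller links.
#include <cassert>

struct Map { int id; };

struct State {
  Map*   map;
  State* caller;  // nullptr for the outermost state

  // Mirrors JVMState::set_map_deep(); note that, as in the original loop,
  // the outermost state (caller == nullptr) is not visited.
  void set_map_deep(Map* m) {
    for (State* p = this; p->caller != nullptr; p = p->caller) {
      p->map = m;
    }
  }
};

int main() {
  Map old_map = { 1 }, new_map = { 2 };
  State root = { &old_map, nullptr };
  State mid  = { &old_map, &root };
  State leaf = { &old_map, &mid };

  leaf.set_map_deep(&new_map);
  assert(leaf.map == &new_map && mid.map == &new_map);
  assert(root.map == &old_map);  // loop stops at the outermost state
  return 0;
}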
@@ -661,21 +672,53 @@

 //
 // Determine whether the call could modify the field of the specified
 // instance at the specified offset.
 //
-bool CallNode::may_modify(const TypePtr *addr_t, PhaseTransform *phase) {
-  const TypeOopPtr *adrInst_t = addr_t->isa_oopptr();
-
-  // If not an OopPtr or not an instance type, assume the worst.
-  // Note: currently this method is called only for instance types.
-  if (adrInst_t == NULL || !adrInst_t->is_known_instance()) {
-    return true;
-  }
-  // The instance_id is set only for scalar-replaceable allocations which
-  // are not passed as arguments according to Escape Analysis.
-  return false;
+bool CallNode::may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) {
+  assert((t_oop != NULL), "sanity");
+  if (t_oop->is_known_instance()) {
+    // The instance_id is set only for scalar-replaceable allocations which
+    // are not passed as arguments according to Escape Analysis.
+    return false;
+  }
+  if (t_oop->is_ptr_to_boxed_value()) {
+    ciKlass* boxing_klass = t_oop->klass();
+    if (is_CallStaticJava() && as_CallStaticJava()->is_boxing_method()) {
+      // Skip unrelated boxing methods.
+      Node* proj = proj_out(TypeFunc::Parms);
+      if ((proj == NULL) || (phase->type(proj)->is_instptr()->klass() != boxing_klass)) {
+        return false;
+      }
+    }
+    if (is_CallJava() && as_CallJava()->method() != NULL) {
+      ciMethod* meth = as_CallJava()->method();
+      if (meth->is_accessor()) {
+        return false;
+      }
+      // May modify (by reflection) if a boxing object is passed
+      // as argument or returned.
+      if (returns_pointer() && (proj_out(TypeFunc::Parms) != NULL)) {
+        Node* proj = proj_out(TypeFunc::Parms);
+        const TypeInstPtr* inst_t = phase->type(proj)->isa_instptr();
+        if ((inst_t != NULL) && (!inst_t->klass_is_exact() ||
+                                 (inst_t->klass() == boxing_klass))) {
+          return true;
+        }
+      }
+      const TypeTuple* d = tf()->domain();
+      for (uint i = TypeFunc::Parms; i < d->cnt(); i++) {
+        const TypeInstPtr* inst_t = d->field_at(i)->isa_instptr();
+        if ((inst_t != NULL) && (!inst_t->klass_is_exact() ||
+                                 (inst_t->klass() == boxing_klass))) {
+          return true;
+        }
+      }
+      return false;
+    }
+  }
+  return true;
 }

 // Does this call have a direct reference to n other than debug information?
 bool CallNode::has_non_debug_use(Node *n) {
   const TypeTuple * d = tf()->domain();
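
The rewritten may_modify() is the heart of this change for alias analysis: a known-instance oop can never be modified by a call, since escape analysis guarantees it is not passed out, while a pointer to a boxed value needs a closer look, because reflection inside a callee could write the box's value field. The ladder below is a simplified, self-contained restatement of that decision order, with invented flags standing in for the C2 type and node queries and type-exactness checks elided; it illustrates the logic, not the real API:

// A simplified, self-contained restatement (invented CallInfo type, not the
// C2 API; type-exactness checks are elided) of the decision order in the
// new CallNode::may_modify().
#include <string>
#include <vector>

struct CallInfo {
  bool known_instance;       // address is a scalar-replaceable allocation
  bool ptr_to_boxed_value;   // address is the value field of a box object
  std::string boxing_klass;  // box class named by the address type
  bool is_boxing_method;     // call is a box-caching factory method
  bool is_java_accessor;     // call is a trivial getter/setter
  bool is_java_call;         // Java call with a known target method
  std::string returned_klass;            // pointer return type, "" if none
  std::vector<std::string> arg_klasses;  // pointer argument types

  bool may_modify() const {
    if (known_instance) {
      // Never passed to a call, according to escape analysis.
      return false;
    }
    if (ptr_to_boxed_value) {
      // A boxing method producing a different box class is unrelated.
      if (is_boxing_method && returned_klass != boxing_klass) {
        return false;
      }
      if (is_java_call) {
        if (is_java_accessor) {
          return false;
        }
        // Reflection inside the callee could write the box only if a
        // compatible object is returned from or passed into the call.
        if (returned_klass == boxing_klass) {
          return true;
        }
        for (const std::string& k : arg_klasses) {
          if (k == boxing_klass) return true;
        }
        return false;
      }
    }
    return true;  // unknown call, unknown address: assume the worst
  }
};

int main() {
  // An Integer field cannot be modified by a call to an unrelated boxing
  // method such as one producing java/lang/Long.
  CallInfo c = { false, true, "java/lang/Integer",
                 true, false, true, "java/lang/Long", {} };
  return c.may_modify() ? 1 : 0;
}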
@@ -1018,10 +1061,11 @@
   assert((int)grow_by > 0, "sanity");
   int monoff = jvms->monoff();
   int scloff = jvms->scloff();
   int endoff = jvms->endoff();
   assert(endoff == (int)req(), "no other states or debug info after me");
+  assert(jvms->scl_size() == 0, "parsed code should not have scalar objects");
   Node* top = Compile::current()->top();
   for (uint i = 0; i < grow_by; i++) {
     ins_req(monoff, top);
   }
   jvms->set_monoff(monoff + grow_by);
@@ -1033,10 +1077,11 @@
   // Add a LockNode, which points to both the original BoxLockNode (the
   // stack space for the monitor) and the Object being locked.
   const int MonitorEdges = 2;
   assert(JVMState::logMonitorEdges == exact_log2(MonitorEdges), "correct MonitorEdges");
   assert(req() == jvms()->endoff(), "correct sizing");
+  assert((jvms()->scl_size() == 0), "parsed code should not have scalar objects");
   int nextmon = jvms()->scloff();
   if (GenerateSynchronizationCode) {
     add_req(lock->box_node());
     add_req(lock->obj_node());
   } else {
@@ -1048,10 +1093,11 @@
   jvms()->set_endoff(req());
 }

 void SafePointNode::pop_monitor() {
   // Delete last monitor from debug info
+  assert((jvms()->scl_size() == 0), "parsed code should not have scalar objects");
   debug_only(int num_before_pop = jvms()->nof_monitors());
   const int MonitorEdges = (1<<JVMState::logMonitorEdges);
   int scloff = jvms()->scloff();
   int endoff = jvms()->endoff();
   int new_scloff = scloff - MonitorEdges;
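
The three asserts added above guard the same invariant: grow_stack(), push_monitor() and pop_monitor() edit the SafePointNode's input list in place, and their arithmetic on monoff/scloff/endoff is only valid while the scalar-replaced-object section between scloff and endoff is still empty, which always holds for freshly parsed code. A toy model of that offset bookkeeping, using plain ints rather than HotSpot types:

// A toy model (plain ints, not HotSpot types) of the offset bookkeeping the
// new asserts protect: debug info inputs are laid out as
//   [locals | expression stack | monitors | scalar objects)
// with monoff <= scloff <= endoff, and scl_size() == 0 means scloff == endoff.
#include <cassert>

struct Offsets {
  int monoff, scloff, endoff;

  int scl_size() const { return endoff - scloff; }

  // Mirrors SafePointNode::grow_stack(): room for grow_by stack slots is
  // inserted at monoff, so every later section boundary shifts up.
  void grow_stack(int grow_by) {
    assert(scl_size() == 0 && "parsed code should not have scalar objects");
    monoff += grow_by;
    scloff += grow_by;
    endoff += grow_by;
  }
};

int main() {
  Offsets o = { 10, 12, 12 };  // one monitor (two edges), no scalar objects
  o.grow_stack(3);
  assert(o.monoff == 13 && o.scloff == 15 && o.endoff == 15);
  return 0;
}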
@@ -1152,10 +1198,11 @@
   : CallNode(atype, NULL, TypeRawPtr::BOTTOM)
 {
   init_class_id(Class_Allocate);
   init_flags(Flag_is_macro);
   _is_scalar_replaceable = false;
+  _is_non_escaping = false;
   Node *topnode = C->top();

   init_req( TypeFunc::Control  , ctrl );
   init_req( TypeFunc::I_O      , abio );
   init_req( TypeFunc::Memory   , mem );
@@ -1167,12 +1214,10 @@
   init_req( ALength            , topnode);
   C->add_macro_node(this);
 }

 //=============================================================================
-uint AllocateArrayNode::size_of() const { return sizeof(*this); }
-
 Node* AllocateArrayNode::Ideal(PhaseGVN *phase, bool can_reshape) {
   if (remove_dead_region(phase, can_reshape)) return this;
   // Don't bother trying to transform a dead node
   if (in(0) && in(0)->is_top()) return NULL;

@@ -1233,10 +1278,12 @@
   if (narrow_length_type != length_type) {
     // Assert one of:
     //   - the narrow_length is 0
     //   - the narrow_length is not wider than length
     assert(narrow_length_type == TypeInt::ZERO ||
+           length_type->is_con() && narrow_length_type->is_con() &&
+           (narrow_length_type->_hi <= length_type->_lo) ||
            (narrow_length_type->_hi <= length_type->_hi &&
             narrow_length_type->_lo >= length_type->_lo),
            "narrow type must be narrower than length type");

     // Return NULL if new nodes are not allowed
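
The extra disjunct in the assertion covers constant lengths: once the optimizer proves both the array length and the narrowed CastII result are constants, the narrowed value may sit strictly below the length's interval instead of nested inside it. For example, length [5,5] with narrow length [3,3] passes the new narrow_hi <= length_lo test but fails the old nesting test. A toy check of exactly that case, with a plain struct standing in for TypeInt:

// A toy check (plain struct, not TypeInt) of the widened assertion: a
// constant narrow length strictly below a constant array length is accepted
// by the new disjunct even though its range is not nested in the length's.
#include <cassert>

struct IntType {
  int lo, hi;
  bool is_con() const { return lo == hi; }
};

bool narrow_ok(const IntType& narrow, const IntType& len) {
  return (narrow.lo == 0 && narrow.hi == 0) ||                        // ZERO
         (len.is_con() && narrow.is_con() && narrow.hi <= len.lo) ||  // new
         (narrow.hi <= len.hi && narrow.lo >= len.lo);                // nested
}

int main() {
  IntType len    = { 5, 5 };  // array length proven to be the constant 5
  IntType narrow = { 3, 3 };  // narrowed CastII result, the constant 3
  assert(narrow_ok(narrow, len));  // passes via the new disjunct
  assert(!(narrow.lo >= len.lo));  // the old nested-range test fails
  return 0;
}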