comparison src/share/vm/opto/escape.cpp @ 10408:836a62f43af9

Merge with http://hg.openjdk.java.net/hsx/hsx25/hotspot/
author Doug Simon <doug.simon@oracle.com>
date Wed, 19 Jun 2013 10:45:56 +0200
parents f2110083203d
children b800986664f4
comparing 10086:e0fb8a213650 (old) with 10408:836a62f43af9 (new)

--- a/src/share/vm/opto/escape.cpp
+++ b/src/share/vm/opto/escape.cpp
@@ -1,7 +1,7 @@
 /*
- * Copyright (c) 2005, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 only, as
  * published by the Free Software Foundation.
@@ -61,18 +61,22 @@
 
 bool ConnectionGraph::has_candidates(Compile *C) {
   // EA brings benefits only when the code has allocations and/or locks which
   // are represented by ideal Macro nodes.
   int cnt = C->macro_count();
-  for( int i=0; i < cnt; i++ ) {
+  for (int i = 0; i < cnt; i++) {
     Node *n = C->macro_node(i);
-    if ( n->is_Allocate() )
+    if (n->is_Allocate())
       return true;
-    if( n->is_Lock() ) {
+    if (n->is_Lock()) {
       Node* obj = n->as_Lock()->obj_node()->uncast();
-      if( !(obj->is_Parm() || obj->is_Con()) )
+      if (!(obj->is_Parm() || obj->is_Con()))
         return true;
     }
+    if (n->is_CallStaticJava() &&
+        n->as_CallStaticJava()->is_boxing_method()) {
+      return true;
+    }
   }
   return false;
 }
 
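Note: with the new third candidate kind, a method whose only allocations come from autoboxing now triggers escape analysis at all. A standalone toy sketch of the scan's shape (invented enum and helper, not HotSpot types):

    #include <cstdio>
    #include <vector>

    enum Kind { Alloc, LockOnLocal, LockOnParamOrCon, BoxingCall, Other };

    // Mirrors the early-out scan above: EA runs only if some node can profit.
    bool has_candidates(const std::vector<Kind>& nodes) {
      for (Kind k : nodes) {
        if (k == Alloc)       return true; // allocation may be eliminated
        if (k == LockOnLocal) return true; // lock on a local object may be elided
        if (k == BoxingCall)  return true; // new: an unused box may be eliminated
      }
      return false;                        // running EA would be wasted work
    }

    int main() {
      std::printf("%d\n", has_candidates({Other, BoxingCall}));       // 1
      std::printf("%d\n", has_candidates({LockOnParamOrCon, Other})); // 0
    }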
@@ -113,11 +117,11 @@
   DEBUG_ONLY( GrowableArray<Node*> addp_worklist; )
 
   { Compile::TracePhase t3("connectionGraph", &Phase::_t_connectionGraph, true);
 
   // 1. Populate Connection Graph (CG) with PointsTo nodes.
-  ideal_nodes.map(C->unique(), NULL);  // preallocate space
+  ideal_nodes.map(C->live_nodes(), NULL);  // preallocate space
   // Initialize worklist
   if (C->root() != NULL) {
     ideal_nodes.push(C->root());
   }
   for( uint next = 0; next < ideal_nodes.size(); ++next ) {
@@ -150,12 +154,15 @@
     } else if (n->is_MemBarStoreStore()) {
       // Collect all MemBarStoreStore nodes so that depending on the
       // escape status of the associated Allocate node some of them
       // may be eliminated.
       storestore_worklist.append(n);
+    } else if (n->is_MemBar() && (n->Opcode() == Op_MemBarRelease) &&
+               (n->req() > MemBarNode::Precedent)) {
+      record_for_optimizer(n);
 #ifdef ASSERT
-    } else if(n->is_AddP()) {
+    } else if (n->is_AddP()) {
       // Collect address nodes for graph verification.
       addp_worklist.append(n);
 #endif
     }
     for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
@@ -204,12 +211,19 @@
   // scalar replaceable allocations on alloc_worklist for processing
   // in split_unique_types().
   int non_escaped_length = non_escaped_worklist.length();
   for (int next = 0; next < non_escaped_length; next++) {
     JavaObjectNode* ptn = non_escaped_worklist.at(next);
-    if (ptn->escape_state() == PointsToNode::NoEscape &&
-        ptn->scalar_replaceable()) {
+    bool noescape = (ptn->escape_state() == PointsToNode::NoEscape);
+    Node* n = ptn->ideal_node();
+    if (n->is_Allocate()) {
+      n->as_Allocate()->_is_non_escaping = noescape;
+    }
+    if (n->is_CallStaticJava()) {
+      n->as_CallStaticJava()->_is_non_escaping = noescape;
+    }
+    if (noescape && ptn->scalar_replaceable()) {
       adjust_scalar_replaceable_state(ptn);
       if (ptn->scalar_replaceable()) {
         alloc_worklist.append(ptn->ideal_node());
       }
     }
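Note: the connection graph built here lives only for the duration of the EA phase, so this hunk copies the NoEscape verdict onto the Allocate/CallStaticJava nodes themselves, where later phases can still read it. A toy sketch of that hand-off (invented types, not HotSpot's):

    #include <cstdio>

    struct Node { bool is_non_escaping = false; };

    // EA persists its verdict on the node before its own data is discarded.
    void escape_analysis(Node& n, bool proved_no_escape) {
      n.is_non_escaping = proved_no_escape;
    }

    // A later phase needs only the flag, not the (gone) connection graph.
    void later_phase(const Node& n) {
      std::puts(n.is_non_escaping ? "may eliminate" : "keep");
    }

    int main() {
      Node alloc;
      escape_analysis(alloc, true);
      later_phase(alloc);          // still knows the EA result
    }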
@@ -261,11 +275,11 @@
       C->AliasLevel() >= 3 && EliminateAllocations) {
     // Now use the escape information to create unique types for
     // scalar replaceable objects.
     split_unique_types(alloc_worklist);
     if (C->failing())  return false;
-    C->print_method("After Escape Analysis", 2);
+    C->print_method(PHASE_AFTER_EA, 2);
 
 #ifdef ASSERT
   } else if (Verbose && (PrintEscapeAnalysis || PrintEliminateAllocations)) {
     tty->print("=== No allocations eliminated for ");
     C->method()->print_short_name();
@@ -328,12 +342,14 @@
       return; // Skip uncommon traps
     }
     // Don't mark as processed since call's arguments have to be processed.
     delayed_worklist->push(n);
     // Check if a call returns an object.
-    if (n->as_Call()->returns_pointer() &&
-        n->as_Call()->proj_out(TypeFunc::Parms) != NULL) {
+    if ((n->as_Call()->returns_pointer() &&
+         n->as_Call()->proj_out(TypeFunc::Parms) != NULL) ||
+        (n->is_CallStaticJava() &&
+         n->as_CallStaticJava()->is_boxing_method())) {
       add_call_node(n->as_Call());
     }
   }
   return;
 }
@@ -385,12 +401,12 @@
     case Op_ConP:
     case Op_ConN:
     case Op_ConNKlass: {
       // assume all oop constants globally escape except for null
       PointsToNode::EscapeState es;
-      if (igvn->type(n) == TypePtr::NULL_PTR ||
-          igvn->type(n) == TypeNarrowOop::NULL_PTR) {
+      const Type* t = igvn->type(n);
+      if (t == TypePtr::NULL_PTR || t == TypeNarrowOop::NULL_PTR) {
         es = PointsToNode::NoEscape;
       } else {
         es = PointsToNode::GlobalEscape;
       }
       add_java_object(n, es);
@@ -466,10 +482,13 @@
     case Op_CompareAndSwapP:
     case Op_CompareAndSwapN: {
       Node* adr = n->in(MemNode::Address);
       const Type *adr_type = igvn->type(adr);
       adr_type = adr_type->make_ptr();
+      if (adr_type == NULL) {
+        break; // skip dead nodes
+      }
       if (adr_type->isa_oopptr() ||
           (opcode == Op_StoreP || opcode == Op_StoreN || opcode == Op_StoreNKlass) &&
           (adr_type == TypeRawPtr::NOTNULL &&
            adr->in(AddPNode::Address)->is_Proj() &&
           adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
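Note on the new NULL check: make_ptr() yields NULL when a node's type has degenerated after the node died, so the result must be tested before any pointer-kind query touches it. A minimal sketch of the guard (toy Type, not HotSpot's):

    #include <cstdio>

    struct Type {
      bool ptr;
      const Type* make_ptr() const { return ptr ? this : nullptr; }
      bool isa_oopptr() const { return ptr; }
    };

    void classify(const Type* t) {
      const Type* adr_type = t->make_ptr();
      if (adr_type == nullptr) {   // dead node: skip instead of crashing
        std::puts("skip dead node");
        return;
      }
      std::puts(adr_type->isa_oopptr() ? "oop address" : "raw address");
    }

    int main() {
      Type live{true}, dead{false};
      classify(&live);
      classify(&dead);
    }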
@@ -658,18 +677,22 @@
     case Op_CompareAndSwapP:
     case Op_CompareAndSwapN:
     case Op_GetAndSetP:
     case Op_GetAndSetN: {
       Node* adr = n->in(MemNode::Address);
-      if (opcode == Op_GetAndSetP || opcode == Op_GetAndSetN) {
-        const Type* t = _igvn->type(n);
-        if (t->make_ptr() != NULL) {
-          add_local_var_and_edge(n, PointsToNode::NoEscape, adr, NULL);
-        }
-      }
       const Type *adr_type = _igvn->type(adr);
       adr_type = adr_type->make_ptr();
+#ifdef ASSERT
+      if (adr_type == NULL) {
+        n->dump(1);
+        assert(adr_type != NULL, "dead node should not be on list");
+        break;
+      }
+#endif
+      if (opcode == Op_GetAndSetP || opcode == Op_GetAndSetN) {
+        add_local_var_and_edge(n, PointsToNode::NoEscape, adr, NULL);
+      }
       if (adr_type->isa_oopptr() ||
           (opcode == Op_StoreP || opcode == Op_StoreN || opcode == Op_StoreNKlass) &&
           (adr_type == TypeRawPtr::NOTNULL &&
            adr->in(AddPNode::Address)->is_Proj() &&
            adr->in(AddPNode::Address)->in(0)->is_Allocate())) {
@@ -795,10 +818,22 @@
       const char* name = call->as_CallStaticJava()->_name;
       assert(strncmp(name, "_multianewarray", 15) == 0, "TODO: add failed case check");
       // Returns a newly allocated unescaped object.
       add_java_object(call, PointsToNode::NoEscape);
       ptnode_adr(call_idx)->set_scalar_replaceable(false);
+    } else if (meth->is_boxing_method()) {
+      // Returns boxing object
+      PointsToNode::EscapeState es;
+      vmIntrinsics::ID intr = meth->intrinsic_id();
+      if (intr == vmIntrinsics::_floatValue || intr == vmIntrinsics::_doubleValue) {
+        // It does not escape if object is always allocated.
+        es = PointsToNode::NoEscape;
+      } else {
+        // It escapes globally if object could be loaded from cache.
+        es = PointsToNode::GlobalEscape;
+      }
+      add_java_object(call, es);
     } else {
       BCEscapeAnalyzer* call_analyzer = meth->get_bcea();
       call_analyzer->copy_dependencies(_compile->dependencies());
       if (call_analyzer->is_return_allocated()) {
         // Returns a newly allocated unescaped object, simply
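Note on the escape-state choice: Java caches small Integer/Short/Byte/Character/Boolean boxes, so a valueOf result may be a shared, globally reachable object, while Float.valueOf/Double.valueOf always allocate a fresh one. A standalone illustration of the distinction (toy model, invented names, not the JDK's code):

    #include <cstdio>

    struct Box { int value; };

    static Box cache[256];              // models Integer's [-128, 127] cache

    Box* integer_value_of(int v) {
      if (v >= -128 && v <= 127) {
        cache[v + 128].value = v;       // shared object: escapes globally
        return &cache[v + 128];
      }
      return new Box{v};                // fresh allocation
    }

    Box* double_value_of(int v) {       // Float/Double style: never cached
      return new Box{v};
    }

    int main() {
      std::printf("cached boxes share identity: %d\n",
                  integer_value_of(1) == integer_value_of(1));   // 1
      Box* a = double_value_of(1);
      Box* b = double_value_of(1);
      std::printf("float/double boxes are fresh: %d\n", a != b); // 1
      delete a; delete b;
    }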
@@ -941,10 +976,13 @@
 #ifdef ASSERT
       const char* name = call->as_CallStaticJava()->_name;
       assert((name == NULL || strcmp(name, "uncommon_trap") != 0), "normal calls only");
 #endif
       ciMethod* meth = call->as_CallJava()->method();
+      if ((meth != NULL) && meth->is_boxing_method()) {
+        break; // Boxing methods do not modify any oops.
+      }
       BCEscapeAnalyzer* call_analyzer = (meth !=NULL) ? meth->get_bcea() : NULL;
       // fall-through if not a Java method or no analyzer information
       if (call_analyzer != NULL) {
         PointsToNode* call_ptn = ptnode_adr(call->_idx);
         const TypeTuple* d = call->tf()->domain();
@@ -1789,13 +1827,12 @@
         jobj2 != NULL && jobj2 != phantom_obj &&
         jobj1->ideal_node()->is_Con() &&
         jobj2->ideal_node()->is_Con()) {
       // Klass or String constants compare. Need to be careful with
       // compressed pointers - compare types of ConN and ConP instead of nodes.
-      const Type* t1 = jobj1->ideal_node()->bottom_type()->make_ptr();
-      const Type* t2 = jobj2->ideal_node()->bottom_type()->make_ptr();
-      assert(t1 != NULL && t2 != NULL, "sanity");
+      const Type* t1 = jobj1->ideal_node()->get_ptr_type();
+      const Type* t2 = jobj2->ideal_node()->get_ptr_type();
       if (t1->make_ptr() == t2->make_ptr()) {
         return _pcmp_eq;
       } else {
         return _pcmp_neq;
       }
@@ -2163,11 +2200,11 @@
   }
   Node* uncast_base = base->uncast();
   int opcode = uncast_base->Opcode();
   assert(opcode == Op_ConP || opcode == Op_ThreadLocal ||
          opcode == Op_CastX2P || uncast_base->is_DecodeNarrowPtr() ||
-         (uncast_base->is_Mem() && uncast_base->bottom_type() == TypeRawPtr::NOTNULL) ||
+         (uncast_base->is_Mem() && (uncast_base->bottom_type()->isa_rawptr() != NULL)) ||
          (uncast_base->is_Proj() && uncast_base->in(0)->is_Allocate()), "sanity");
   }
   return base;
 }
 
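Note: the assert is relaxed from demanding one exact type object (TypeRawPtr::NOTNULL) to accepting any type in the raw-pointer family. A toy contrast of exact identity versus family membership (names invented for illustration):

    #include <cstdio>

    struct Type {
      bool is_raw;
      bool not_null;
      const Type* isa_rawptr() const { return is_raw ? this : nullptr; }
    };

    static const Type RAW_NOTNULL   = { true, true  };
    static const Type RAW_MAYBENULL = { true, false };

    int main() {
      const Type* t = &RAW_MAYBENULL;
      std::printf("exact match:  %d\n", t == &RAW_NOTNULL);           // 0: too strict
      std::printf("family match: %d\n", t->isa_rawptr() != nullptr);  // 1: accepted
    }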
@@ -2742,10 +2779,15 @@
       if (alloc->is_Allocate()) {
         // Set the scalar_replaceable flag for allocation
         // so it could be eliminated if it has no uses.
         alloc->as_Allocate()->_is_scalar_replaceable = true;
       }
+      if (alloc->is_CallStaticJava()) {
+        // Set the scalar_replaceable flag for boxing method
+        // so it could be eliminated if it has no uses.
+        alloc->as_CallStaticJava()->_is_scalar_replaceable = true;
+      }
       continue;
     }
     if (!n->is_CheckCastPP()) { // not unique CheckCastPP.
       assert(!alloc->is_Allocate(), "allocation should have unique type");
       continue;
@@ -2779,10 +2821,15 @@
     }
     if (alloc->is_Allocate()) {
       // Set the scalar_replaceable flag for allocation
       // so it could be eliminated.
       alloc->as_Allocate()->_is_scalar_replaceable = true;
     }
+    if (alloc->is_CallStaticJava()) {
+      // Set the scalar_replaceable flag for boxing method
+      // so it could be eliminated.
+      alloc->as_CallStaticJava()->_is_scalar_replaceable = true;
+    }
     set_escape_state(ptnode_adr(n->_idx), es); // CheckCastPP escape state
     // in order for an object to be scalar-replaceable, it must be:
     //   - a direct allocation (not a call returning an object)
     //   - non-escaping
@@ -2909,11 +2956,13 @@
       Node *use = n->fast_out(i);
       if(use->is_Mem() && use->in(MemNode::Address) == n) {
         // Load/store to instance's field
         memnode_worklist.append_if_missing(use);
       } else if (use->is_MemBar()) {
-        memnode_worklist.append_if_missing(use);
+        if (use->in(TypeFunc::Memory) == n) { // Ignore precedent edge
+          memnode_worklist.append_if_missing(use);
+        }
       } else if (use->is_AddP() && use->outcnt() > 0) { // No dead nodes
         Node* addp2 = find_second_addp(use, n);
         if (addp2 != NULL) {
           alloc_worklist.append_if_missing(addp2);
         }
@@ -3026,11 +3075,13 @@
       } else if (use->is_Mem() && use->in(MemNode::Memory) == n) {
         if (use->Opcode() == Op_StoreCM) // Ignore cardmark stores
           continue;
         memnode_worklist.append_if_missing(use);
       } else if (use->is_MemBar()) {
-        memnode_worklist.append_if_missing(use);
+        if (use->in(TypeFunc::Memory) == n) { // Ignore precedent edge
+          memnode_worklist.append_if_missing(use);
+        }
 #ifdef ASSERT
       } else if(use->is_Mem()) {
         assert(use->in(MemNode::Memory) != n, "EA: missing memory path");
       } else if (use->is_MergeMem()) {
         assert(_mergemem_worklist.contains(use->as_MergeMem()), "EA: missing MergeMem node in the worklist");
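Note on "Ignore precedent edge" (both hunks above): a MemBar can be reached from this node through either its Memory input or its Precedent input; only the former is a true memory dependence, so only that case puts the membar on the memory worklist. Toy sketch of the edge check (slot names borrowed from the code, layout invented):

    #include <cstdio>

    struct Node { const Node* memory; const Node* precedent; };

    // Follow a membar use only if n feeds its Memory slot; a precedent-only
    // edge is an ordering hint, not memory flow.
    bool follow_membar(const Node& membar, const Node* n) {
      return membar.memory == n;
    }

    int main() {
      Node store{nullptr, nullptr};
      Node via_memory{&store, nullptr};     // reached via Memory: follow
      Node via_precedent{nullptr, &store};  // reached via Precedent: skip
      std::printf("%d %d\n", follow_membar(via_memory, &store),
                             follow_membar(via_precedent, &store));
    }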
@@ -3262,11 +3313,16 @@
   for (int i = 0; i < ptnodes_length; i++) {
     PointsToNode *ptn = ptnodes_worklist.at(i);
     if (ptn == NULL || !ptn->is_JavaObject())
       continue;
     PointsToNode::EscapeState es = ptn->escape_state();
-    if (ptn->ideal_node()->is_Allocate() && (es == PointsToNode::NoEscape || Verbose)) {
+    if ((es != PointsToNode::NoEscape) && !Verbose) {
+      continue;
+    }
+    Node* n = ptn->ideal_node();
+    if (n->is_Allocate() || (n->is_CallStaticJava() &&
+        n->as_CallStaticJava()->is_boxing_method())) {
       if (first) {
         tty->cr();
         tty->print("======== Connection graph for ");
         _compile->method()->print_short_name();
         tty->cr();