Mercurial > hg > truffle
comparison src/share/vm/opto/escape.cpp @ 163:885ed790ecf0
6695810: null oop passed to encode_heap_oop_not_null
Summary: fix several problems in C2 related to Escape Analysis and Compressed Oops.
Reviewed-by: never, jrose
author | kvn |
---|---|
date | Wed, 21 May 2008 10:45:07 -0700 |
parents | b130b98db9cf |
children | c436414a719e |
comparison legend: equal | deleted | inserted | replaced
162:8aa010f60e0f | 163:885ed790ecf0 |
---|---|
886 n->raise_bottom_type(tinst); | 886 n->raise_bottom_type(tinst); |
887 igvn->hash_insert(n); | 887 igvn->hash_insert(n); |
888 record_for_optimizer(n); | 888 record_for_optimizer(n); |
889 if (alloc->is_Allocate() && ptn->_scalar_replaceable && | 889 if (alloc->is_Allocate() && ptn->_scalar_replaceable && |
890 (t->isa_instptr() || t->isa_aryptr())) { | 890 (t->isa_instptr() || t->isa_aryptr())) { |
891 | |
892 // First, put on the worklist all Field edges from Connection Graph | |
893 // which is more accurate than putting immediate users from Ideal Graph. | |
894 for (uint e = 0; e < ptn->edge_count(); e++) { | |
895 Node *use = _nodes->adr_at(ptn->edge_target(e))->_node; | |
896 assert(ptn->edge_type(e) == PointsToNode::FieldEdge && use->is_AddP(), | |
897 "only AddP nodes are Field edges in CG"); | |
898 if (use->outcnt() > 0) { // Don't process dead nodes | |
899 Node* addp2 = find_second_addp(use, use->in(AddPNode::Base)); | |
900 if (addp2 != NULL) { | |
901 assert(alloc->is_AllocateArray(),"array allocation was expected"); | |
902 alloc_worklist.append_if_missing(addp2); | |
903 } | |
904 alloc_worklist.append_if_missing(use); | |
905 } | |
906 } | |
907 | |
891 // An allocation may have an Initialize which has raw stores. Scan | 908 // An allocation may have an Initialize which has raw stores. Scan |
892 // the users of the raw allocation result and push AddP users | 909 // the users of the raw allocation result and push AddP users |
893 // on alloc_worklist. | 910 // on alloc_worklist. |
894 Node *raw_result = alloc->proj_out(TypeFunc::Parms); | 911 Node *raw_result = alloc->proj_out(TypeFunc::Parms); |
895 assert (raw_result != NULL, "must have an allocation result"); | 912 assert (raw_result != NULL, "must have an allocation result"); |
917 Node *base = get_map(elem); // CheckCastPP node | 934 Node *base = get_map(elem); // CheckCastPP node |
918 split_AddP(n, base, igvn); | 935 split_AddP(n, base, igvn); |
919 tinst = igvn->type(base)->isa_oopptr(); | 936 tinst = igvn->type(base)->isa_oopptr(); |
920 } else if (n->is_Phi() || | 937 } else if (n->is_Phi() || |
921 n->is_CheckCastPP() || | 938 n->is_CheckCastPP() || |
939 n->Opcode() == Op_EncodeP || | |
940 n->Opcode() == Op_DecodeN || | |
922 (n->is_ConstraintCast() && n->Opcode() == Op_CastPP)) { | 941 (n->is_ConstraintCast() && n->Opcode() == Op_CastPP)) { |
923 if (visited.test_set(n->_idx)) { | 942 if (visited.test_set(n->_idx)) { |
924 assert(n->is_Phi(), "loops only through Phi's"); | 943 assert(n->is_Phi(), "loops only through Phi's"); |
925 continue; // already processed | 944 continue; // already processed |
926 } | 945 } |
933 Node *val = get_map(elem); // CheckCastPP node | 952 Node *val = get_map(elem); // CheckCastPP node |
934 TypeNode *tn = n->as_Type(); | 953 TypeNode *tn = n->as_Type(); |
935 tinst = igvn->type(val)->isa_oopptr(); | 954 tinst = igvn->type(val)->isa_oopptr(); |
936 assert(tinst != NULL && tinst->is_instance() && | 955 assert(tinst != NULL && tinst->is_instance() && |
937 tinst->instance_id() == elem , "instance type expected."); | 956 tinst->instance_id() == elem , "instance type expected."); |
938 const TypeOopPtr *tn_t = igvn->type(tn)->isa_oopptr(); | 957 |
958 const TypeOopPtr *tn_t = NULL; | |
959 const Type *tn_type = igvn->type(tn); | |
960 if (tn_type->isa_narrowoop()) { | |
961 tn_t = tn_type->is_narrowoop()->make_oopptr()->isa_oopptr(); | |
962 } else { | |
963 tn_t = tn_type->isa_oopptr(); | |
964 } | |
939 | 965 |
940 if (tn_t != NULL && | 966 if (tn_t != NULL && |
941 tinst->cast_to_instance(TypeOopPtr::UNKNOWN_INSTANCE)->higher_equal(tn_t)) { | 967 tinst->cast_to_instance(TypeOopPtr::UNKNOWN_INSTANCE)->higher_equal(tn_t)) { |
968 if (tn_type->isa_narrowoop()) { | |
969 tn_type = tinst->make_narrowoop(); | |
970 } else { | |
971 tn_type = tinst; | |
972 } | |
942 igvn->hash_delete(tn); | 973 igvn->hash_delete(tn); |
943 igvn->set_type(tn, tinst); | 974 igvn->set_type(tn, tn_type); |
944 tn->set_type(tinst); | 975 tn->set_type(tn_type); |
945 igvn->hash_insert(tn); | 976 igvn->hash_insert(tn); |
946 record_for_optimizer(n); | 977 record_for_optimizer(n); |
947 } | 978 } |
948 } | 979 } |
949 } else { | 980 } else { |
976 alloc_worklist.append_if_missing(addp2); | 1007 alloc_worklist.append_if_missing(addp2); |
977 } | 1008 } |
978 alloc_worklist.append_if_missing(use); | 1009 alloc_worklist.append_if_missing(use); |
979 } else if (use->is_Phi() || | 1010 } else if (use->is_Phi() || |
980 use->is_CheckCastPP() || | 1011 use->is_CheckCastPP() || |
1012 use->Opcode() == Op_EncodeP || | |
1013 use->Opcode() == Op_DecodeN || | |
981 (use->is_ConstraintCast() && use->Opcode() == Op_CastPP)) { | 1014 (use->is_ConstraintCast() && use->Opcode() == Op_CastPP)) { |
982 alloc_worklist.append_if_missing(use); | 1015 alloc_worklist.append_if_missing(use); |
983 } | 1016 } |
984 } | 1017 } |
985 | 1018 |
1197 } | 1230 } |
1198 } | 1231 } |
1199 | 1232 |
1200 void ConnectionGraph::compute_escape() { | 1233 void ConnectionGraph::compute_escape() { |
1201 | 1234 |
1202 // 1. Populate Connection Graph with Ideal nodes. | 1235 // 1. Populate Connection Graph (CG) with Ideal nodes. |
1203 | 1236 |
1204 Unique_Node_List worklist_init; | 1237 Unique_Node_List worklist_init; |
1205 worklist_init.map(_compile->unique(), NULL); // preallocate space | 1238 worklist_init.map(_compile->unique(), NULL); // preallocate space |
1206 | 1239 |
1207 // Initialize worklist | 1240 // Initialize worklist |
1279 Node *n = ptn->_node; | 1312 Node *n = ptn->_node; |
1280 if (nt == PointsToNode::LocalVar || nt == PointsToNode::Field) { | 1313 if (nt == PointsToNode::LocalVar || nt == PointsToNode::Field) { |
1281 remove_deferred(ni, &deferred_edges, &visited); | 1314 remove_deferred(ni, &deferred_edges, &visited); |
1282 if (n->is_AddP()) { | 1315 if (n->is_AddP()) { |
1283 // If this AddP computes an address which may point to more than one | 1316 // If this AddP computes an address which may point to more than one |
1284 // object, nothing the address points to can be scalar replaceable. | 1317 // object or more than one field (array's element), nothing the address |
1318 // points to can be scalar replaceable. | |
1285 Node *base = get_addp_base(n); | 1319 Node *base = get_addp_base(n); |
1286 ptset.Clear(); | 1320 ptset.Clear(); |
1287 PointsTo(ptset, base, igvn); | 1321 PointsTo(ptset, base, igvn); |
1288 if (ptset.Size() > 1) { | 1322 if (ptset.Size() > 1 || |
1323 (ptset.Size() != 0 && ptn->offset() == Type::OffsetBot)) { | |
1289 for( VectorSetI j(&ptset); j.test(); ++j ) { | 1324 for( VectorSetI j(&ptset); j.test(); ++j ) { |
1290 uint pt = j.elem; | 1325 uint pt = j.elem; |
1291 ptnode_adr(pt)->_scalar_replaceable = false; | 1326 ptnode_adr(pt)->_scalar_replaceable = false; |
1292 } | 1327 } |
1293 } | 1328 } |
1977 case Op_ConP: | 2012 case Op_ConP: |
1978 { | 2013 { |
1979 assert(false, "Op_ConP"); | 2014 assert(false, "Op_ConP"); |
1980 break; | 2015 break; |
1981 } | 2016 } |
2017 case Op_ConN: | |
2018 { | |
2019 assert(false, "Op_ConN"); | |
2020 break; | |
2021 } | |
1982 case Op_CreateEx: | 2022 case Op_CreateEx: |
1983 { | 2023 { |
1984 assert(false, "Op_CreateEx"); | 2024 assert(false, "Op_CreateEx"); |
1985 break; | 2025 break; |
1986 } | 2026 } |