Mercurial > hg > graal-jvmci-8
comparison src/share/vm/opto/escape.cpp @ 4763:1dc233a8c7fe
7121140: Allocation paths require explicit memory synchronization operations for RMO systems
Summary: adds a store-store barrier after initialization of the header and body of objects.
Reviewed-by: never, kvn
author | roland |
---|---|
date | Tue, 20 Dec 2011 16:56:50 +0100 |
parents | cc81b9c09bbb |
children | e9a5e0a812c8 |
comparison
equal
deleted
inserted
replaced
4762:069ab3f976d3 | 4763:1dc233a8c7fe |
---|---|
1593 } | 1593 } |
1594 | 1594 |
1595 GrowableArray<Node*> alloc_worklist; | 1595 GrowableArray<Node*> alloc_worklist; |
1596 GrowableArray<Node*> addp_worklist; | 1596 GrowableArray<Node*> addp_worklist; |
1597 GrowableArray<Node*> ptr_cmp_worklist; | 1597 GrowableArray<Node*> ptr_cmp_worklist; |
1598 GrowableArray<Node*> storestore_worklist; | |
1598 PhaseGVN* igvn = _igvn; | 1599 PhaseGVN* igvn = _igvn; |
1599 | 1600 |
1600 // Push all useful nodes onto CG list and set their type. | 1601 // Push all useful nodes onto CG list and set their type. |
1601 for( uint next = 0; next < worklist_init.size(); ++next ) { | 1602 for( uint next = 0; next < worklist_init.size(); ++next ) { |
1602 Node* n = worklist_init.at(next); | 1603 Node* n = worklist_init.at(next); |
1616 _mergemem_worklist.append(n->as_MergeMem()); | 1617 _mergemem_worklist.append(n->as_MergeMem()); |
1617 } else if (OptimizePtrCompare && n->is_Cmp() && | 1618 } else if (OptimizePtrCompare && n->is_Cmp() && |
1618 (n->Opcode() == Op_CmpP || n->Opcode() == Op_CmpN)) { | 1619 (n->Opcode() == Op_CmpP || n->Opcode() == Op_CmpN)) { |
1619 // Compare pointers nodes | 1620 // Compare pointers nodes |
1620 ptr_cmp_worklist.append(n); | 1621 ptr_cmp_worklist.append(n); |
1622 } else if (n->is_MemBarStoreStore()) { | |
1623 // Collect all MemBarStoreStore nodes so that depending on the | |
1624 // escape status of the associated Allocate node some of them | |
1625 // may be eliminated. | |
1626 storestore_worklist.append(n); | |
1621 } | 1627 } |
1622 for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) { | 1628 for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) { |
1623 Node* m = n->fast_out(i); // Get user | 1629 Node* m = n->fast_out(i); // Get user |
1624 worklist_init.push(m); | 1630 worklist_init.push(m); |
1625 } | 1631 } |
1722 | 1728 |
1723 // 6. Find fields initializing values for not escaped allocations | 1729 // 6. Find fields initializing values for not escaped allocations |
1724 uint alloc_length = alloc_worklist.length(); | 1730 uint alloc_length = alloc_worklist.length(); |
1725 for (uint next = 0; next < alloc_length; ++next) { | 1731 for (uint next = 0; next < alloc_length; ++next) { |
1726 Node* n = alloc_worklist.at(next); | 1732 Node* n = alloc_worklist.at(next); |
1727 if (ptnode_adr(n->_idx)->escape_state() == PointsToNode::NoEscape) { | 1733 PointsToNode::EscapeState es = ptnode_adr(n->_idx)->escape_state(); |
1734 if (es == PointsToNode::NoEscape) { | |
1728 has_non_escaping_obj = true; | 1735 has_non_escaping_obj = true; |
1729 if (n->is_Allocate()) { | 1736 if (n->is_Allocate()) { |
1730 find_init_values(n, &visited, igvn); | 1737 find_init_values(n, &visited, igvn); |
1731 } | 1738 // The object allocated by this Allocate node will never be |
1739 // seen by an other thread. Mark it so that when it is | |
1740 // expanded no MemBarStoreStore is added. | |
1741 n->as_Allocate()->initialization()->set_does_not_escape(); | |
1742 } | |
1743 } else if ((es == PointsToNode::ArgEscape) && n->is_Allocate()) { | |
1744 // Same as above. Mark this Allocate node so that when it is | |
1745 // expanded no MemBarStoreStore is added. | |
1746 n->as_Allocate()->initialization()->set_does_not_escape(); | |
1732 } | 1747 } |
1733 } | 1748 } |
1734 | 1749 |
1735 uint cg_length = cg_worklist.length(); | 1750 uint cg_length = cg_worklist.length(); |
1736 | 1751 |
1870 // cleanup | 1885 // cleanup |
1871 if (_pcmp_neq->outcnt() == 0) | 1886 if (_pcmp_neq->outcnt() == 0) |
1872 igvn->hash_delete(_pcmp_neq); | 1887 igvn->hash_delete(_pcmp_neq); |
1873 if (_pcmp_eq->outcnt() == 0) | 1888 if (_pcmp_eq->outcnt() == 0) |
1874 igvn->hash_delete(_pcmp_eq); | 1889 igvn->hash_delete(_pcmp_eq); |
1890 } | |
1891 | |
1892 // For MemBarStoreStore nodes added in library_call.cpp, check | |
1893 // escape status of associated AllocateNode and optimize out | |
1894 // MemBarStoreStore node if the allocated object never escapes. | |
1895 while (storestore_worklist.length() != 0) { | |
1896 Node *n = storestore_worklist.pop(); | |
1897 MemBarStoreStoreNode *storestore = n ->as_MemBarStoreStore(); | |
1898 Node *alloc = storestore->in(MemBarNode::Precedent)->in(0); | |
1899 assert (alloc->is_Allocate(), "storestore should point to AllocateNode"); | |
1900 PointsToNode::EscapeState es = ptnode_adr(alloc->_idx)->escape_state(); | |
1901 if (es == PointsToNode::NoEscape || es == PointsToNode::ArgEscape) { | |
1902 MemBarNode* mb = MemBarNode::make(C, Op_MemBarCPUOrder, Compile::AliasIdxBot); | |
1903 mb->init_req(TypeFunc::Memory, storestore->in(TypeFunc::Memory)); | |
1904 mb->init_req(TypeFunc::Control, storestore->in(TypeFunc::Control)); | |
1905 | |
1906 _igvn->register_new_node_with_optimizer(mb); | |
1907 _igvn->replace_node(storestore, mb); | |
1908 } | |
1875 } | 1909 } |
1876 | 1910 |
1877 #ifndef PRODUCT | 1911 #ifndef PRODUCT |
1878 if (PrintEscapeAnalysis) { | 1912 if (PrintEscapeAnalysis) { |
1879 dump(); // Dump ConnectionGraph | 1913 dump(); // Dump ConnectionGraph |