Mercurial > hg > graal-compiler
comparison src/share/vm/opto/memnode.hpp @ 4137:04b9a2566eec
Merge with hsx23/hotspot.
author | Thomas Wuerthinger <thomas.wuerthinger@oracle.com> |
---|---|
date | Sat, 17 Dec 2011 21:40:27 +0100 |
parents | 1bd45abaa507 |
children | 1dc233a8c7fe |
comparison
equal
deleted
inserted
replaced
3737:9dc19b7d89a3 | 4137:04b9a2566eec |
---|---|
798 virtual bool depends_only_on_test() const { return false; } | 798 virtual bool depends_only_on_test() const { return false; } |
799 virtual const TypePtr* adr_type() const { return TypeAryPtr::CHARS; } | 799 virtual const TypePtr* adr_type() const { return TypeAryPtr::CHARS; } |
800 virtual uint match_edge(uint idx) const; | 800 virtual uint match_edge(uint idx) const; |
801 virtual uint ideal_reg() const { return Op_RegI; } | 801 virtual uint ideal_reg() const { return Op_RegI; } |
802 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | 802 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); |
803 virtual const Type *Value(PhaseTransform *phase) const; | |
803 }; | 804 }; |
804 | 805 |
805 //------------------------------StrComp------------------------------------- | 806 //------------------------------StrComp------------------------------------- |
806 class StrCompNode: public StrIntrinsicNode { | 807 class StrCompNode: public StrIntrinsicNode { |
807 public: | 808 public: |
877 Node* precedent = NULL); | 878 Node* precedent = NULL); |
878 }; | 879 }; |
879 | 880 |
880 // "Acquire" - no following ref can move before (but earlier refs can | 881 // "Acquire" - no following ref can move before (but earlier refs can |
881 // follow, like an early Load stalled in cache). Requires multi-cpu | 882 // follow, like an early Load stalled in cache). Requires multi-cpu |
882 // visibility. Inserted after a volatile load or FastLock. | 883 // visibility. Inserted after a volatile load. |
883 class MemBarAcquireNode: public MemBarNode { | 884 class MemBarAcquireNode: public MemBarNode { |
884 public: | 885 public: |
885 MemBarAcquireNode(Compile* C, int alias_idx, Node* precedent) | 886 MemBarAcquireNode(Compile* C, int alias_idx, Node* precedent) |
886 : MemBarNode(C, alias_idx, precedent) {} | 887 : MemBarNode(C, alias_idx, precedent) {} |
887 virtual int Opcode() const; | 888 virtual int Opcode() const; |
888 }; | 889 }; |
889 | 890 |
890 // "Release" - no earlier ref can move after (but later refs can move | 891 // "Release" - no earlier ref can move after (but later refs can move |
891 // up, like a speculative pipelined cache-hitting Load). Requires | 892 // up, like a speculative pipelined cache-hitting Load). Requires |
892 // multi-cpu visibility. Inserted before a volatile store or FastUnLock. | 893 // multi-cpu visibility. Inserted before a volatile store. |
893 class MemBarReleaseNode: public MemBarNode { | 894 class MemBarReleaseNode: public MemBarNode { |
894 public: | 895 public: |
895 MemBarReleaseNode(Compile* C, int alias_idx, Node* precedent) | 896 MemBarReleaseNode(Compile* C, int alias_idx, Node* precedent) |
897 : MemBarNode(C, alias_idx, precedent) {} | |
898 virtual int Opcode() const; | |
899 }; | |
900 | |
901 // "Acquire" - no following ref can move before (but earlier refs can | |
902 // follow, like an early Load stalled in cache). Requires multi-cpu | |
903 // visibility. Inserted after a FastLock. | |
904 class MemBarAcquireLockNode: public MemBarNode { | |
905 public: | |
906 MemBarAcquireLockNode(Compile* C, int alias_idx, Node* precedent) | |
907 : MemBarNode(C, alias_idx, precedent) {} | |
908 virtual int Opcode() const; | |
909 }; | |
910 | |
911 // "Release" - no earlier ref can move after (but later refs can move | |
912 // up, like a speculative pipelined cache-hitting Load). Requires | |
913 // multi-cpu visibility. Inserted before a FastUnLock. | |
914 class MemBarReleaseLockNode: public MemBarNode { | |
915 public: | |
916 MemBarReleaseLockNode(Compile* C, int alias_idx, Node* precedent) | |
896 : MemBarNode(C, alias_idx, precedent) {} | 917 : MemBarNode(C, alias_idx, precedent) {} |
897 virtual int Opcode() const; | 918 virtual int Opcode() const; |
898 }; | 919 }; |
899 | 920 |
900 // Ordering between a volatile store and a following volatile load. | 921 // Ordering between a volatile store and a following volatile load. |
920 // Isolation of object setup after an AllocateNode and before next safepoint. | 941 // Isolation of object setup after an AllocateNode and before next safepoint. |
921 // (See comment in memnode.cpp near InitializeNode::InitializeNode for semantics.) | 942 // (See comment in memnode.cpp near InitializeNode::InitializeNode for semantics.) |
922 class InitializeNode: public MemBarNode { | 943 class InitializeNode: public MemBarNode { |
923 friend class AllocateNode; | 944 friend class AllocateNode; |
924 | 945 |
925 bool _is_complete; | 946 enum { |
947 Incomplete = 0, | |
948 Complete = 1, | |
949 WithArraycopy = 2 | |
950 }; | |
951 int _is_complete; | |
926 | 952 |
927 public: | 953 public: |
928 enum { | 954 enum { |
929 Control = TypeFunc::Control, | 955 Control = TypeFunc::Control, |
930 Memory = TypeFunc::Memory, // MergeMem for states affected by this op | 956 Memory = TypeFunc::Memory, // MergeMem for states affected by this op |
954 bool is_non_zero(); | 980 bool is_non_zero(); |
955 | 981 |
956 // An InitializeNode must be completed before macro expansion is done. | 982 // An InitializeNode must be completed before macro expansion is done. |
957 // Completion requires that the AllocateNode must be followed by | 983 // Completion requires that the AllocateNode must be followed by |
958 // initialization of the new memory to zero, then to any initializers. | 984 // initialization of the new memory to zero, then to any initializers. |
959 bool is_complete() { return _is_complete; } | 985 bool is_complete() { return _is_complete != Incomplete; } |
986 bool is_complete_with_arraycopy() { return (_is_complete & WithArraycopy) != 0; } | |
960 | 987 |
961 // Mark complete. (Must not yet be complete.) | 988 // Mark complete. (Must not yet be complete.) |
962 void set_complete(PhaseGVN* phase); | 989 void set_complete(PhaseGVN* phase); |
990 void set_complete_with_arraycopy() { _is_complete = Complete | WithArraycopy; } | |
963 | 991 |
964 #ifdef ASSERT | 992 #ifdef ASSERT |
965 // ensure all non-degenerate stores are ordered and non-overlapping | 993 // ensure all non-degenerate stores are ordered and non-overlapping |
966 bool stores_are_sane(PhaseTransform* phase); | 994 bool stores_are_sane(PhaseTransform* phase); |
967 #endif //ASSERT | 995 #endif //ASSERT |
1256 public: | 1284 public: |
1257 PrefetchWriteNode(Node *abio, Node *adr) : Node(0,abio,adr) {} | 1285 PrefetchWriteNode(Node *abio, Node *adr) : Node(0,abio,adr) {} |
1258 virtual int Opcode() const; | 1286 virtual int Opcode() const; |
1259 virtual uint ideal_reg() const { return NotAMachineReg; } | 1287 virtual uint ideal_reg() const { return NotAMachineReg; } |
1260 virtual uint match_edge(uint idx) const { return idx==2; } | 1288 virtual uint match_edge(uint idx) const { return idx==2; } |
1289 virtual const Type *bottom_type() const { return Type::ABIO; } | |
1290 }; | |
1291 | |
1292 // Allocation prefetch which may fault, TLAB size have to be adjusted. | |
1293 class PrefetchAllocationNode : public Node { | |
1294 public: | |
1295 PrefetchAllocationNode(Node *mem, Node *adr) : Node(0,mem,adr) {} | |
1296 virtual int Opcode() const; | |
1297 virtual uint ideal_reg() const { return NotAMachineReg; } | |
1298 virtual uint match_edge(uint idx) const { return idx==2; } | |
1261 virtual const Type *bottom_type() const { return ( AllocatePrefetchStyle == 3 ) ? Type::MEMORY : Type::ABIO; } | 1299 virtual const Type *bottom_type() const { return ( AllocatePrefetchStyle == 3 ) ? Type::MEMORY : Type::ABIO; } |
1262 }; | 1300 }; |
1263 | 1301 |
1264 #endif // SHARE_VM_OPTO_MEMNODE_HPP | 1302 #endif // SHARE_VM_OPTO_MEMNODE_HPP |