comparison src/share/vm/opto/callnode.hpp @ 10278:6f3fd5150b67

6934604: enable parts of EliminateAutoBox by default
Summary: Resurrected autobox elimination code and enabled part of it by default.
Reviewed-by: roland, twisti
author kvn
date Wed, 08 May 2013 15:08:01 -0700
parents a7114d3d712e
children 9758d9f36299 766fac3395d6
comparing 10277:aabf54ccedb1 (old; removed lines marked "-") with 10278:6f3fd5150b67 (new; added lines marked "+")
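
Background (not part of the changeset): with EliminateAutoBox, C2 can treat calls to boxing methods such as Integer.valueOf as macro nodes, so a box that never escapes can be eliminated; that is why the diff below marks boxing CallStaticJavaNodes with Flag_is_macro and records the escape-analysis results (_is_scalar_replaceable, _is_non_escaping) on the call. A minimal, hypothetical Java sketch of the pattern the optimization targets (class, method, and variable names are illustrative only):

// BoxRoundTrip.java -- illustrative sketch, not part of the changeset.
public class BoxRoundTrip {
    // The Integer produced by Integer.valueOf(v) never leaves this method:
    // it is unboxed again immediately, so a compiler that eliminates
    // non-escaping boxes can use the plain int value directly.
    static int roundTrip(int v) {
        Integer boxed = Integer.valueOf(v); // boxing call
        return boxed.intValue();            // immediate unbox
    }

    public static void main(String[] args) {
        long sum = 0;
        for (int i = 0; i < 1_000_000; i++) { // hot loop so the JIT compiles roundTrip
            sum += roundTrip(i);
        }
        System.out.println(sum);
    }
}

After this change, parts of the optimization are enabled by default; running such code with -XX:-EliminateAutoBox turns it off again for comparison.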
  47   47   class CallRuntimeNode;
  48   48   class CallLeafNode;
  49   49   class CallLeafNoFPNode;
  50   50   class AllocateNode;
  51   51   class AllocateArrayNode;
       52 + class BoxLockNode;
  52   53   class LockNode;
  53   54   class UnlockNode;
  54   55   class JVMState;
  55   56   class OopMap;
  56   57   class State;
 233  234   uint endoff() const { return _endoff; }
 234  235   uint oopoff() const { return debug_end(); }
 235  236
 236  237   int loc_size() const { return stkoff() - locoff(); }
 237  238   int stk_size() const { return monoff() - stkoff(); }
 238      - int arg_size() const { return monoff() - argoff(); }
 239  239   int mon_size() const { return scloff() - monoff(); }
 240  240   int scl_size() const { return endoff() - scloff(); }
 241  241
 242  242   bool is_loc(uint i) const { return locoff() <= i && i < stkoff(); }
 243  243   bool is_stk(uint i) const { return stkoff() <= i && i < monoff(); }
 296  296   void set_should_reexecute(bool reexec) {_reexecute = reexec ? Reexecute_True : Reexecute_False;}
 297  297
 298  298   // Miscellaneous utility functions
 299  299   JVMState* clone_deep(Compile* C) const; // recursively clones caller chain
 300  300   JVMState* clone_shallow(Compile* C) const; // retains uncloned caller
      301 + void set_map_deep(SafePointNode *map);// reset map for all callers
 301  302
 302  303   #ifndef PRODUCT
 303  304   void format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st) const;
 304  305   void dump_spec(outputStream *st) const;
 305  306   void dump_on(outputStream* st) const;
 437  438   virtual uint match_edge(uint idx) const;
 438  439
 439  440   static bool needs_polling_address_input();
 440  441
 441  442   #ifndef PRODUCT
 442  443   virtual void dump_spec(outputStream *st) const;
 443  444   #endif
 444  445   };
 445  446
 446  447   //------------------------------SafePointScalarObjectNode----------------------
 447  448   // A SafePointScalarObjectNode represents the state of a scalarized object
 552  553   // Are we guaranteed that this node is a safepoint? Not true for leaf calls and
 553  554   // for some macro nodes whose expansion does not have a safepoint on the fast path.
 554  555   virtual bool guaranteed_safepoint() { return true; }
 555  556   // For macro nodes, the JVMState gets modified during expansion, so when cloning
 556  557   // the node the JVMState must be cloned.
 557      - virtual void clone_jvms() { } // default is not to clone
      558 + virtual void clone_jvms(Compile* C) { } // default is not to clone
 558  559
 559  560   // Returns true if the call may modify n
 560      - virtual bool may_modify(const TypePtr *addr_t, PhaseTransform *phase);
      561 + virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase);
 561  562   // Does this node have a use of n other than in debug information?
 562  563   bool has_non_debug_use(Node *n);
 563  564   // Returns the unique CheckCastPP of a call
 564  565   // or result projection if there are several CheckCastPPs,
 565  566   // or returns NULL if there is none.
 628  629   // routines); generates static stub.
 629  630   class CallStaticJavaNode : public CallJavaNode {
 630  631   virtual uint cmp( const Node &n ) const;
 631  632   virtual uint size_of() const; // Size is bigger
 632  633   public:
 633      - CallStaticJavaNode(const TypeFunc* tf, address addr, ciMethod* method, int bci)
      634 + CallStaticJavaNode(Compile* C, const TypeFunc* tf, address addr, ciMethod* method, int bci)
 634  635   : CallJavaNode(tf, addr, method, bci), _name(NULL) {
 635  636   init_class_id(Class_CallStaticJava);
      637 + if (C->eliminate_boxing() && (method != NULL) && method->is_boxing_method()) {
      638 + init_flags(Flag_is_macro);
      639 + C->add_macro_node(this);
      640 + }
      641 + _is_scalar_replaceable = false;
      642 + _is_non_escaping = false;
 636  643   }
 637  644   CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, int bci,
 638  645   const TypePtr* adr_type)
 639  646   : CallJavaNode(tf, addr, NULL, bci), _name(name) {
 640  647   init_class_id(Class_CallStaticJava);
 641  648   // This node calls a runtime stub, which often has narrow memory effects.
 642  649   _adr_type = adr_type;
 643      - }
 644      - const char *_name; // Runtime wrapper name
      650 + _is_scalar_replaceable = false;
      651 + _is_non_escaping = false;
      652 + }
      653 + const char *_name; // Runtime wrapper name
      654 +
      655 + // Result of Escape Analysis
      656 + bool _is_scalar_replaceable;
      657 + bool _is_non_escaping;
 645  658
 646  659   // If this is an uncommon trap, return the request code, else zero.
 647  660   int uncommon_trap_request() const;
 648  661   static int extract_uncommon_trap_request(const Node* call);
      662 +
      663 + bool is_boxing_method() const {
      664 + return is_macro() && (method() != NULL) && method()->is_boxing_method();
      665 + }
      666 + // Later inlining modifies the JVMState, so we need to clone it
      667 + // when the call node is cloned (because it is a macro node).
      668 + virtual void clone_jvms(Compile* C) {
      669 + if ((jvms() != NULL) && is_boxing_method()) {
      670 + set_jvms(jvms()->clone_deep(C));
      671 + jvms()->set_map_deep(this);
      672 + }
      673 + }
 649  674
 650  675   virtual int Opcode() const;
 651  676   #ifndef PRODUCT
 652  677   virtual void dump_spec(outputStream *st) const;
 653  678   #endif
 746  771   InitialTest, // slow-path test (may be constant)
 747  772   ALength, // array length (or TOP if none)
 748  773   ParmLimit
 749  774   };
 750  775
 751      - static const TypeFunc* alloc_type() {
      776 + static const TypeFunc* alloc_type(const Type* t) {
 752  777   const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
 753  778   fields[AllocSize] = TypeInt::POS;
 754  779   fields[KlassNode] = TypeInstPtr::NOTNULL;
 755  780   fields[InitialTest] = TypeInt::BOOL;
 756      - fields[ALength] = TypeInt::INT; // length (can be a bad length)
      781 + fields[ALength] = t; // length (can be a bad length)
 757  782
 758  783   const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);
 759  784
 760  785   // create result type (range)
 761  786   fields = TypeTuple::fields(1);
 764  789   const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);
 765  790
 766  791   return TypeFunc::make(domain, range);
 767  792   }
 768  793
 769      - bool _is_scalar_replaceable; // Result of Escape Analysis
      794 + // Result of Escape Analysis
      795 + bool _is_scalar_replaceable;
      796 + bool _is_non_escaping;
 770  797
 771  798   virtual uint size_of() const; // Size is bigger
 772  799   AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
 773  800   Node *size, Node *klass_node, Node *initial_test);
 774  801   // Expansion modifies the JVMState, so we need to clone it
 775      - virtual void clone_jvms() {
      802 + virtual void clone_jvms(Compile* C) {
 776      - set_jvms(jvms()->clone_deep(Compile::current()));
      803 + if (jvms() != NULL) {
      804 + set_jvms(jvms()->clone_deep(C));
      805 + jvms()->set_map_deep(this);
      806 + }
 777  807   }
 778  808   virtual int Opcode() const;
 779  809   virtual uint ideal_reg() const { return Op_RegP; }
 780  810   virtual bool guaranteed_safepoint() { return false; }
 781  811
 782  812   // allocations do not modify their arguments
 783      - virtual bool may_modify(const TypePtr *addr_t, PhaseTransform *phase) { return false;}
      813 + virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}
 784  814
 785  815   // Pattern-match a possible usage of AllocateNode.
 786  816   // Return null if no allocation is recognized.
 787  817   // The operand is the pointer produced by the (possible) allocation.
 788  818   // It must be a projection of the Allocate or its subsequent CastPP.
 812  842   // Return the corresponding initialization barrier (or null if none).
 813  843   // Walks out edges to find it...
 814  844   // (Note: Both InitializeNode::allocation and AllocateNode::initialization
 815  845   // are defined in graphKit.cpp, which sets up the bidirectional relation.)
 816  846   InitializeNode* initialization();
 817      -
 818      - // Return the corresponding storestore barrier (or null if none).
 819      - // Walks out edges to find it...
 820      - MemBarStoreStoreNode* storestore();
 821  847
 822  848   // Convenience for initialization->maybe_set_complete(phase)
 823  849   bool maybe_set_complete(PhaseGVN* phase);
 824  850   };
 825  851
 838  864   {
 839  865   init_class_id(Class_AllocateArray);
 840  866   set_req(AllocateNode::ALength, count_val);
 841  867   }
 842  868   virtual int Opcode() const;
 843      - virtual uint size_of() const; // Size is bigger
 844  869   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 845  870
 846  871   // Dig the length operand out of an array allocation site.
 847  872   Node* Ideal_length() {
 848  873   return in(AllocateNode::ALength);
 916  941   void set_non_esc_obj() { _kind = NonEscObj; set_eliminated_lock_counter(); }
 917  942   void set_coarsened() { _kind = Coarsened; set_eliminated_lock_counter(); }
 918  943   void set_nested() { _kind = Nested; set_eliminated_lock_counter(); }
 919  944
 920  945   // locking does not modify its arguments
 921      - virtual bool may_modify(const TypePtr *addr_t, PhaseTransform *phase){ return false;}
      946 + virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase){ return false;}
 922  947
 923  948   #ifndef PRODUCT
 924  949   void create_lock_counter(JVMState* s);
 925  950   NamedCounter* counter() const { return _counter; }
 926  951   #endif
 963  988   }
 964  989   virtual bool guaranteed_safepoint() { return false; }
 965  990
 966  991   virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
 967  992   // Expansion modifies the JVMState, so we need to clone it
 968      - virtual void clone_jvms() {
      993 + virtual void clone_jvms(Compile* C) {
 969      - set_jvms(jvms()->clone_deep(Compile::current()));
      994 + if (jvms() != NULL) {
      995 + set_jvms(jvms()->clone_deep(C));
      996 + jvms()->set_map_deep(this);
      997 + }
 970  998   }
 971  999
 972 1000   bool is_nested_lock_region(); // Is this Lock nested?
 973 1001   };
 974 1002