comparison src/share/vm/opto/compile.cpp @ 3255:5d046bf49ce7

Merge
author johnc
date Thu, 14 Apr 2011 13:45:41 -0700
parents 08eb13460b3a 92add02409c9
children 7889bbcc7f88
comparison
equal deleted inserted replaced
2468:6c97c830fb6f 3255:5d046bf49ce7
627 627
628 // Put top into the hash table ASAP. 628 // Put top into the hash table ASAP.
629 initial_gvn()->transform_no_reclaim(top()); 629 initial_gvn()->transform_no_reclaim(top());
630 630
631 // Set up tf(), start(), and find a CallGenerator. 631 // Set up tf(), start(), and find a CallGenerator.
632 CallGenerator* cg; 632 CallGenerator* cg = NULL;
633 if (is_osr_compilation()) { 633 if (is_osr_compilation()) {
634 const TypeTuple *domain = StartOSRNode::osr_domain(); 634 const TypeTuple *domain = StartOSRNode::osr_domain();
635 const TypeTuple *range = TypeTuple::make_range(method()->signature()); 635 const TypeTuple *range = TypeTuple::make_range(method()->signature());
636 init_tf(TypeFunc::make(domain, range)); 636 init_tf(TypeFunc::make(domain, range));
637 StartNode* s = new (this, 2) StartOSRNode(root(), domain); 637 StartNode* s = new (this, 2) StartOSRNode(root(), domain);
642 // Normal case. 642 // Normal case.
643 init_tf(TypeFunc::make(method())); 643 init_tf(TypeFunc::make(method()));
644 StartNode* s = new (this, 2) StartNode(root(), tf()->domain()); 644 StartNode* s = new (this, 2) StartNode(root(), tf()->domain());
645 initial_gvn()->set_type_bottom(s); 645 initial_gvn()->set_type_bottom(s);
646 init_start(s); 646 init_start(s);
647 float past_uses = method()->interpreter_invocation_count(); 647 if (method()->intrinsic_id() == vmIntrinsics::_Reference_get && UseG1GC) {
648 float expected_uses = past_uses; 648 // With java.lang.ref.reference.get() we must go through the
649 cg = CallGenerator::for_inline(method(), expected_uses); 649 // intrinsic when G1 is enabled - even when get() is the root
650 // method of the compile - so that, if necessary, the value in
651 // the referent field of the reference object gets recorded by
652 // the pre-barrier code.
653 // Specifically, if G1 is enabled, the value in the referent
654 // field is recorded by the G1 SATB pre barrier. This will
655 // result in the referent being marked live and the reference
656 // object removed from the list of discovered references during
657 // reference processing.
658 cg = find_intrinsic(method(), false);
659 }
660 if (cg == NULL) {
661 float past_uses = method()->interpreter_invocation_count();
662 float expected_uses = past_uses;
663 cg = CallGenerator::for_inline(method(), expected_uses);
664 }
650 } 665 }
651 if (failing()) return; 666 if (failing()) return;
652 if (cg == NULL) { 667 if (cg == NULL) {
653 record_method_not_compilable_all_tiers("cannot parse method"); 668 record_method_not_compilable_all_tiers("cannot parse method");
654 return; 669 return;
2037 static bool oop_offset_is_sane(const TypeInstPtr* tp) { 2052 static bool oop_offset_is_sane(const TypeInstPtr* tp) {
2038 ciInstanceKlass *k = tp->klass()->as_instance_klass(); 2053 ciInstanceKlass *k = tp->klass()->as_instance_klass();
2039 // Make sure the offset goes inside the instance layout. 2054 // Make sure the offset goes inside the instance layout.
2040 return k->contains_field_offset(tp->offset()); 2055 return k->contains_field_offset(tp->offset());
2041 // Note that OffsetBot and OffsetTop are very negative. 2056 // Note that OffsetBot and OffsetTop are very negative.
2057 }
2058
// Eliminate trivially redundant StoreCMs and accumulate their
// precedence edges.
//
// Called on a StoreCM node n after its OopStore input has been moved to a
// precedence edge.  Walks n's memory input chain looking at earlier StoreCMs
// to the same card address for the same value; each such earlier mark is
// redundant, so it is spliced out of the memory chain and its oop-store
// dependence is folded into n's precedence edges instead.
static void eliminate_redundant_card_marks(Node* n) {
  assert(n->Opcode() == Op_StoreCM, "expected StoreCM");
  if (n->in(MemNode::Address)->outcnt() > 1) {
    // There are multiple users of the same address so it might be
    // possible to eliminate some of the StoreCMs
    Node* mem = n->in(MemNode::Memory);
    Node* adr = n->in(MemNode::Address);
    Node* val = n->in(MemNode::ValueIn);
    Node* prev = n;       // last StoreCM kept on the chain (splice point)
    bool done = false;
    // Walk the chain of StoreCMs eliminating ones that match.  As
    // long as it's a chain of single users then the optimization is
    // safe.  Eliminating partially redundant StoreCMs would require
    // cloning copies down the other paths.
    while (mem->Opcode() == Op_StoreCM && mem->outcnt() == 1 && !done) {
      if (adr == mem->in(MemNode::Address) &&
          val == mem->in(MemNode::ValueIn)) {
        // redundant StoreCM
        if (mem->req() > MemNode::OopStore) {
          // Hasn't been processed by this code yet: the oop store is
          // still a required input, so carry it over as a precedence
          // edge on n.
          n->add_prec(mem->in(MemNode::OopStore));
        } else {
          // Already converted to precedence edge
          for (uint i = mem->req(); i < mem->len(); i++) {
            // Accumulate any precedence edges
            if (mem->in(i) != NULL) {
              n->add_prec(mem->in(i));
            }
          }
          // Everything above this point has been processed.
          done = true;
        }
        // Eliminate the previous StoreCM: bypass it on the memory
        // chain, then verify it is dead before disconnecting.
        prev->set_req(MemNode::Memory, mem->in(MemNode::Memory));
        assert(mem->outcnt() == 0, "should be dead");
        mem->disconnect_inputs(NULL);
      } else {
        // Different address/value: keep this StoreCM and continue the
        // walk from it.
        prev = mem;
      }
      mem = prev->in(MemNode::Memory);
    }
  }
}
2043 2104
2044 //------------------------------final_graph_reshaping_impl---------------------- 2105 //------------------------------final_graph_reshaping_impl----------------------
2045 // Implement items 1-5 from final_graph_reshaping below. 2106 // Implement items 1-5 from final_graph_reshaping below.
2046 static void final_graph_reshaping_impl( Node *n, Final_Reshape_Counts &frc ) { 2107 static void final_graph_reshaping_impl( Node *n, Final_Reshape_Counts &frc ) {
2165 case Op_StoreF: 2226 case Op_StoreF:
2166 case Op_LoadF: 2227 case Op_LoadF:
2167 frc.inc_float_count(); 2228 frc.inc_float_count();
2168 goto handle_mem; 2229 goto handle_mem;
2169 2230
2231 case Op_StoreCM:
2232 {
2233 // Convert OopStore dependence into precedence edge
2234 Node* prec = n->in(MemNode::OopStore);
2235 n->del_req(MemNode::OopStore);
2236 n->add_prec(prec);
2237 eliminate_redundant_card_marks(n);
2238 }
2239
2240 // fall through
2241
2170 case Op_StoreB: 2242 case Op_StoreB:
2171 case Op_StoreC: 2243 case Op_StoreC:
2172 case Op_StoreCM:
2173 case Op_StorePConditional: 2244 case Op_StorePConditional:
2174 case Op_StoreI: 2245 case Op_StoreI:
2175 case Op_StoreL: 2246 case Op_StoreL:
2176 case Op_StoreIConditional: 2247 case Op_StoreIConditional:
2177 case Op_StoreLConditional: 2248 case Op_StoreLConditional: