Mercurial > hg > graal-jvmci-8
comparison src/share/vm/opto/macro.cpp @ 73:a8880a78d355
6259129: (Escape Analysis) scalar replacement for not escaping objects
Summary: Use scalar replacement with EA to remove allocations for objects which do not escape the compiled method.
Reviewed-by: rasbold, never, jrose
author | kvn |
---|---|
date | Thu, 20 Mar 2008 13:51:55 -0700 |
parents | 6dbf1a175d6b |
children | ba764ed4b6f2 |
comparison
equal
deleted
inserted
replaced
72:f705f25597eb | 73:a8880a78d355 |
---|---|
177 default: | 177 default: |
178 assert(false, "unexpected projection from allocation node."); | 178 assert(false, "unexpected projection from allocation node."); |
179 } | 179 } |
180 } | 180 } |
181 | 181 |
182 } | |
183 | |
184 // Eliminate a card mark sequence. p2x is a ConvP2XNode | |
185 void PhaseMacroExpand::eliminate_card_mark(Node *p2x) { | |
186 assert(p2x->Opcode() == Op_CastP2X, "ConvP2XNode required"); | |
187 Node *shift = p2x->unique_out(); | |
188 Node *addp = shift->unique_out(); | |
189 for (DUIterator_Last jmin, j = addp->last_outs(jmin); j >= jmin; --j) { | |
190 Node *st = addp->last_out(j); | |
191 assert(st->is_Store(), "store required"); | |
192 _igvn.replace_node(st, st->in(MemNode::Memory)); | |
193 } | |
194 } | |
195 | |
196 // Search for a memory operation for the specified memory slice. | |
197 static Node *scan_mem_chain(Node *mem, int alias_idx, int offset, Node *start_mem, Node *alloc) { | |
198 Node *orig_mem = mem; | |
199 Node *alloc_mem = alloc->in(TypeFunc::Memory); | |
200 while (true) { | |
201 if (mem == alloc_mem || mem == start_mem ) { | |
202 return mem; // hit one of our sentinels | |
203 } else if (mem->is_MergeMem()) { | |
204 mem = mem->as_MergeMem()->memory_at(alias_idx); | |
205 } else if (mem->is_Proj() && mem->as_Proj()->_con == TypeFunc::Memory) { | |
206 Node *in = mem->in(0); | |
207 // we can safely skip over safepoints, calls, locks and membars because we | |
208 // already know that the object is safe to eliminate. | |
209 if (in->is_Initialize() && in->as_Initialize()->allocation() == alloc) { | |
210 return in; | |
211 } else if (in->is_Call() || in->is_MemBar()) { | |
212 mem = in->in(TypeFunc::Memory); | |
213 } else { | |
214 assert(false, "unexpected projection"); | |
215 } | |
216 } else if (mem->is_Store()) { | |
217 const TypePtr* atype = mem->as_Store()->adr_type(); | |
218 int adr_idx = Compile::current()->get_alias_index(atype); | |
219 if (adr_idx == alias_idx) { | |
220 assert(atype->isa_oopptr(), "address type must be oopptr"); | |
221 int adr_offset = atype->offset(); | |
222 uint adr_iid = atype->is_oopptr()->instance_id(); | |
223 // Array element references have the same alias_idx | |
224 // but different offset and different instance_id. | |
225 if (adr_offset == offset && adr_iid == alloc->_idx) | |
226 return mem; | |
227 } else { | |
228 assert(adr_idx == Compile::AliasIdxRaw, "address must match or be raw"); | |
229 } | |
230 mem = mem->in(MemNode::Memory); | |
231 } else { | |
232 return mem; | |
233 } | |
234 if (mem == orig_mem) | |
235 return mem; | |
236 } | |
237 } | |
238 | |
239 // | |
240 // Given a Memory Phi, compute a value Phi containing the values from stores | |
241 // on the input paths. | |
242 // Note: this function is recursive, its depth is limited by the "level" argument | |
243 // Returns the computed Phi, or NULL if it cannot compute it. | |
244 Node *PhaseMacroExpand::value_from_mem_phi(Node *mem, BasicType ft, const Type *phi_type, const TypeOopPtr *adr_t, Node *alloc, int level) { | |
245 | |
246 if (level <= 0) { | |
247 return NULL; | |
248 } | |
249 int alias_idx = C->get_alias_index(adr_t); | |
250 int offset = adr_t->offset(); | |
251 int instance_id = adr_t->instance_id(); | |
252 | |
253 Node *start_mem = C->start()->proj_out(TypeFunc::Memory); | |
254 Node *alloc_mem = alloc->in(TypeFunc::Memory); | |
255 | |
256 uint length = mem->req(); | |
257 GrowableArray <Node *> values(length, length, NULL); | |
258 | |
259 for (uint j = 1; j < length; j++) { | |
260 Node *in = mem->in(j); | |
261 if (in == NULL || in->is_top()) { | |
262 values.at_put(j, in); | |
263 } else { | |
264 Node *val = scan_mem_chain(in, alias_idx, offset, start_mem, alloc); | |
265 if (val == start_mem || val == alloc_mem) { | |
266 // hit a sentinel, return appropriate 0 value | |
267 values.at_put(j, _igvn.zerocon(ft)); | |
268 continue; | |
269 } | |
270 if (val->is_Initialize()) { | |
271 val = val->as_Initialize()->find_captured_store(offset, type2aelembytes(ft), &_igvn); | |
272 } | |
273 if (val == NULL) { | |
274 return NULL; // can't find a value on this path | |
275 } | |
276 if (val == mem) { | |
277 values.at_put(j, mem); | |
278 } else if (val->is_Store()) { | |
279 values.at_put(j, val->in(MemNode::ValueIn)); | |
280 } else if(val->is_Proj() && val->in(0) == alloc) { | |
281 values.at_put(j, _igvn.zerocon(ft)); | |
282 } else if (val->is_Phi()) { | |
283 // Check if an appropriate node already exists. | |
284 Node* region = val->in(0); | |
285 Node* old_phi = NULL; | |
286 for (DUIterator_Fast kmax, k = region->fast_outs(kmax); k < kmax; k++) { | |
287 Node* phi = region->fast_out(k); | |
288 if (phi->is_Phi() && phi != val && | |
289 phi->as_Phi()->is_same_inst_field(phi_type, instance_id, alias_idx, offset)) { | |
290 old_phi = phi; | |
291 break; | |
292 } | |
293 } | |
294 if (old_phi == NULL) { | |
295 val = value_from_mem_phi(val, ft, phi_type, adr_t, alloc, level-1); | |
296 if (val == NULL) { | |
297 return NULL; | |
298 } | |
299 values.at_put(j, val); | |
300 } else { | |
301 values.at_put(j, old_phi); | |
302 } | |
303 } else { | |
304 return NULL; // unknown node on this path | |
305 } | |
306 } | |
307 } | |
308 // create a new Phi for the value | |
309 PhiNode *phi = new (C, length) PhiNode(mem->in(0), phi_type, NULL, instance_id, alias_idx, offset); | |
310 for (uint j = 1; j < length; j++) { | |
311 if (values.at(j) == mem) { | |
312 phi->init_req(j, phi); | |
313 } else { | |
314 phi->init_req(j, values.at(j)); | |
315 } | |
316 } | |
317 transform_later(phi); | |
318 return phi; | |
319 } | |
320 | |
321 // Search the last value stored into the object's field. | |
322 Node *PhaseMacroExpand::value_from_mem(Node *sfpt_mem, BasicType ft, const Type *ftype, const TypeOopPtr *adr_t, Node *alloc) { | |
323 assert(adr_t->is_instance_field(), "instance required"); | |
324 uint instance_id = adr_t->instance_id(); | |
325 assert(instance_id == alloc->_idx, "wrong allocation"); | |
326 | |
327 int alias_idx = C->get_alias_index(adr_t); | |
328 int offset = adr_t->offset(); | |
329 Node *start_mem = C->start()->proj_out(TypeFunc::Memory); | |
330 Node *alloc_ctrl = alloc->in(TypeFunc::Control); | |
331 Node *alloc_mem = alloc->in(TypeFunc::Memory); | |
332 VectorSet visited(Thread::current()->resource_area()); | |
333 | |
334 | |
335 bool done = sfpt_mem == alloc_mem; | |
336 Node *mem = sfpt_mem; | |
337 while (!done) { | |
338 if (visited.test_set(mem->_idx)) { | |
339 return NULL; // found a loop, give up | |
340 } | |
341 mem = scan_mem_chain(mem, alias_idx, offset, start_mem, alloc); | |
342 if (mem == start_mem || mem == alloc_mem) { | |
343 done = true; // hit a sentinel, return appropriate 0 value | |
344 } else if (mem->is_Initialize()) { | |
345 mem = mem->as_Initialize()->find_captured_store(offset, type2aelembytes(ft), &_igvn); | |
346 if (mem == NULL) { | |
347 done = true; // Something went wrong. | |
348 } else if (mem->is_Store()) { | |
349 const TypePtr* atype = mem->as_Store()->adr_type(); | |
350 assert(C->get_alias_index(atype) == Compile::AliasIdxRaw, "store is correct memory slice"); | |
351 done = true; | |
352 } | |
353 } else if (mem->is_Store()) { | |
354 const TypeOopPtr* atype = mem->as_Store()->adr_type()->isa_oopptr(); | |
355 assert(atype != NULL, "address type must be oopptr"); | |
356 assert(C->get_alias_index(atype) == alias_idx && | |
357 atype->is_instance_field() && atype->offset() == offset && | |
358 atype->instance_id() == instance_id, "store is correct memory slice"); | |
359 done = true; | |
360 } else if (mem->is_Phi()) { | |
361 // try to find a phi's unique input | |
362 Node *unique_input = NULL; | |
363 Node *top = C->top(); | |
364 for (uint i = 1; i < mem->req(); i++) { | |
365 Node *n = scan_mem_chain(mem->in(i), alias_idx, offset, start_mem, alloc); | |
366 if (n == NULL || n == top || n == mem) { | |
367 continue; | |
368 } else if (unique_input == NULL) { | |
369 unique_input = n; | |
370 } else if (unique_input != n) { | |
371 unique_input = top; | |
372 break; | |
373 } | |
374 } | |
375 if (unique_input != NULL && unique_input != top) { | |
376 mem = unique_input; | |
377 } else { | |
378 done = true; | |
379 } | |
380 } else { | |
381 assert(false, "unexpected node"); | |
382 } | |
383 } | |
384 if (mem != NULL) { | |
385 if (mem == start_mem || mem == alloc_mem) { | |
386 // hit a sentinel, return appropriate 0 value | |
387 return _igvn.zerocon(ft); | |
388 } else if (mem->is_Store()) { | |
389 return mem->in(MemNode::ValueIn); | |
390 } else if (mem->is_Phi()) { | |
391 // attempt to produce a Phi reflecting the values on the input paths of the Phi | |
392 Node * phi = value_from_mem_phi(mem, ft, ftype, adr_t, alloc, 8); | |
393 if (phi != NULL) { | |
394 return phi; | |
395 } | |
396 } | |
397 } | |
398 // Something went wrong. | |
399 return NULL; | |
400 } | |
401 | |
402 // Check the possibility of scalar replacement. | |
403 bool PhaseMacroExpand::can_eliminate_allocation(AllocateNode *alloc, GrowableArray <SafePointNode *>& safepoints) { | |
404 // Scan the uses of the allocation to check for anything that would | |
405 // prevent us from eliminating it. | |
406 NOT_PRODUCT( const char* fail_eliminate = NULL; ) | |
407 DEBUG_ONLY( Node* disq_node = NULL; ) | |
408 bool can_eliminate = true; | |
409 | |
410 Node* res = alloc->result_cast(); | |
411 const TypeOopPtr* res_type = NULL; | |
412 if (res == NULL) { | |
413 // All users were eliminated. | |
414 } else if (!res->is_CheckCastPP()) { | |
415 alloc->_is_scalar_replaceable = false; // don't try again | |
416 NOT_PRODUCT(fail_eliminate = "Allocation does not have unique CheckCastPP";) | |
417 can_eliminate = false; | |
418 } else { | |
419 res_type = _igvn.type(res)->isa_oopptr(); | |
420 if (res_type == NULL) { | |
421 NOT_PRODUCT(fail_eliminate = "Neither instance or array allocation";) | |
422 can_eliminate = false; | |
423 } else if (res_type->isa_aryptr()) { | |
424 int length = alloc->in(AllocateNode::ALength)->find_int_con(-1); | |
425 if (length < 0) { | |
426 NOT_PRODUCT(fail_eliminate = "Array's size is not constant";) | |
427 can_eliminate = false; | |
428 } | |
429 } | |
430 } | |
431 | |
432 if (can_eliminate && res != NULL) { | |
433 for (DUIterator_Fast jmax, j = res->fast_outs(jmax); | |
434 j < jmax && can_eliminate; j++) { | |
435 Node* use = res->fast_out(j); | |
436 | |
437 if (use->is_AddP()) { | |
438 const TypePtr* addp_type = _igvn.type(use)->is_ptr(); | |
439 int offset = addp_type->offset(); | |
440 | |
441 if (offset == Type::OffsetTop || offset == Type::OffsetBot) { | |
442 NOT_PRODUCT(fail_eliminate = "Undefined field referrence";) | |
443 can_eliminate = false; | |
444 break; | |
445 } | |
446 for (DUIterator_Fast kmax, k = use->fast_outs(kmax); | |
447 k < kmax && can_eliminate; k++) { | |
448 Node* n = use->fast_out(k); | |
449 if (!n->is_Store() && n->Opcode() != Op_CastP2X) { | |
450 DEBUG_ONLY(disq_node = n;) | |
451 if (n->is_Load()) { | |
452 NOT_PRODUCT(fail_eliminate = "Field load";) | |
453 } else { | |
454 NOT_PRODUCT(fail_eliminate = "Not store field referrence";) | |
455 } | |
456 can_eliminate = false; | |
457 } | |
458 } | |
459 } else if (use->is_SafePoint()) { | |
460 SafePointNode* sfpt = use->as_SafePoint(); | |
461 if (sfpt->has_non_debug_use(res)) { | |
462 // Object is passed as argument. | |
463 DEBUG_ONLY(disq_node = use;) | |
464 NOT_PRODUCT(fail_eliminate = "Object is passed as argument";) | |
465 can_eliminate = false; | |
466 } | |
467 Node* sfptMem = sfpt->memory(); | |
468 if (sfptMem == NULL || sfptMem->is_top()) { | |
469 DEBUG_ONLY(disq_node = use;) | |
470 NOT_PRODUCT(fail_eliminate = "NULL or TOP memory";) | |
471 can_eliminate = false; | |
472 } else { | |
473 safepoints.append_if_missing(sfpt); | |
474 } | |
475 } else if (use->Opcode() != Op_CastP2X) { // CastP2X is used by card mark | |
476 if (use->is_Phi()) { | |
477 if (use->outcnt() == 1 && use->unique_out()->Opcode() == Op_Return) { | |
478 NOT_PRODUCT(fail_eliminate = "Object is return value";) | |
479 } else { | |
480 NOT_PRODUCT(fail_eliminate = "Object is referenced by Phi";) | |
481 } | |
482 DEBUG_ONLY(disq_node = use;) | |
483 } else { | |
484 if (use->Opcode() == Op_Return) { | |
485 NOT_PRODUCT(fail_eliminate = "Object is return value";) | |
486 }else { | |
487 NOT_PRODUCT(fail_eliminate = "Object is referenced by node";) | |
488 } | |
489 DEBUG_ONLY(disq_node = use;) | |
490 } | |
491 can_eliminate = false; | |
492 } | |
493 } | |
494 } | |
495 | |
496 #ifndef PRODUCT | |
497 if (PrintEliminateAllocations) { | |
498 if (can_eliminate) { | |
499 tty->print("Scalar "); | |
500 if (res == NULL) | |
501 alloc->dump(); | |
502 else | |
503 res->dump(); | |
504 } else { | |
505 tty->print("NotScalar (%s)", fail_eliminate); | |
506 if (res == NULL) | |
507 alloc->dump(); | |
508 else | |
509 res->dump(); | |
510 #ifdef ASSERT | |
511 if (disq_node != NULL) { | |
512 tty->print(" >>>> "); | |
513 disq_node->dump(); | |
514 } | |
515 #endif /*ASSERT*/ | |
516 } | |
517 } | |
518 #endif | |
519 return can_eliminate; | |
520 } | |
521 | |
522 // Do scalar replacement. | |
523 bool PhaseMacroExpand::scalar_replacement(AllocateNode *alloc, GrowableArray <SafePointNode *>& safepoints) { | |
524 GrowableArray <SafePointNode *> safepoints_done; | |
525 | |
526 ciKlass* klass = NULL; | |
527 ciInstanceKlass* iklass = NULL; | |
528 int nfields = 0; | |
529 int array_base; | |
530 int element_size; | |
531 BasicType basic_elem_type; | |
532 ciType* elem_type; | |
533 | |
534 Node* res = alloc->result_cast(); | |
535 const TypeOopPtr* res_type = NULL; | |
536 if (res != NULL) { // Could be NULL when there are no users | |
537 res_type = _igvn.type(res)->isa_oopptr(); | |
538 } | |
539 | |
540 if (res != NULL) { | |
541 klass = res_type->klass(); | |
542 if (res_type->isa_instptr()) { | |
543 // find the fields of the class which will be needed for safepoint debug information | |
544 assert(klass->is_instance_klass(), "must be an instance klass."); | |
545 iklass = klass->as_instance_klass(); | |
546 nfields = iklass->nof_nonstatic_fields(); | |
547 } else { | |
548 // find the array's elements which will be needed for safepoint debug information | |
549 nfields = alloc->in(AllocateNode::ALength)->find_int_con(-1); | |
550 assert(klass->is_array_klass() && nfields >= 0, "must be an array klass."); | |
551 elem_type = klass->as_array_klass()->element_type(); | |
552 basic_elem_type = elem_type->basic_type(); | |
553 array_base = arrayOopDesc::base_offset_in_bytes(basic_elem_type); | |
554 element_size = type2aelembytes(basic_elem_type); | |
555 } | |
556 } | |
557 // | |
558 // Process the safepoint uses | |
559 // | |
560 while (safepoints.length() > 0) { | |
561 SafePointNode* sfpt = safepoints.pop(); | |
562 Node* mem = sfpt->memory(); | |
563 uint first_ind = sfpt->req(); | |
564 SafePointScalarObjectNode* sobj = new (C, 1) SafePointScalarObjectNode(res_type, | |
565 #ifdef ASSERT | |
566 alloc, | |
567 #endif | |
568 first_ind, nfields); | |
569 sobj->init_req(0, sfpt->in(TypeFunc::Control)); | |
570 transform_later(sobj); | |
571 | |
572 // Scan object's fields adding an input to the safepoint for each field. | |
573 for (int j = 0; j < nfields; j++) { | |
574 int offset; | |
575 ciField* field = NULL; | |
576 if (iklass != NULL) { | |
577 field = iklass->nonstatic_field_at(j); | |
578 offset = field->offset(); | |
579 elem_type = field->type(); | |
580 basic_elem_type = field->layout_type(); | |
581 } else { | |
582 offset = array_base + j * element_size; | |
583 } | |
584 | |
585 const Type *field_type; | |
586 // The next code is taken from Parse::do_get_xxx(). | |
587 if (basic_elem_type == T_OBJECT) { | |
588 if (!elem_type->is_loaded()) { | |
589 field_type = TypeInstPtr::BOTTOM; | |
590 } else if (field != NULL && field->is_constant()) { | |
591 // This can happen if the constant oop is non-perm. | |
592 ciObject* con = field->constant_value().as_object(); | |
593 // Do not "join" in the previous type; it doesn't add value, | |
594 // and may yield a vacuous result if the field is of interface type. | |
595 field_type = TypeOopPtr::make_from_constant(con)->isa_oopptr(); | |
596 assert(field_type != NULL, "field singleton type must be consistent"); | |
597 } else { | |
598 field_type = TypeOopPtr::make_from_klass(elem_type->as_klass()); | |
599 } | |
600 } else { | |
601 field_type = Type::get_const_basic_type(basic_elem_type); | |
602 } | |
603 | |
604 const TypeOopPtr *field_addr_type = res_type->add_offset(offset)->isa_oopptr(); | |
605 | |
606 Node *field_val = value_from_mem(mem, basic_elem_type, field_type, field_addr_type, alloc); | |
607 if (field_val == NULL) { | |
608 // we weren't able to find a value for this field, | |
609 // give up on eliminating this allocation | |
610 alloc->_is_scalar_replaceable = false; // don't try again | |
611 // remove any extra entries we added to the safepoint | |
612 uint last = sfpt->req() - 1; | |
613 for (int k = 0; k < j; k++) { | |
614 sfpt->del_req(last--); | |
615 } | |
616 // rollback processed safepoints | |
617 while (safepoints_done.length() > 0) { | |
618 SafePointNode* sfpt_done = safepoints_done.pop(); | |
619 // remove any extra entries we added to the safepoint | |
620 last = sfpt_done->req() - 1; | |
621 for (int k = 0; k < nfields; k++) { | |
622 sfpt_done->del_req(last--); | |
623 } | |
624 JVMState *jvms = sfpt_done->jvms(); | |
625 jvms->set_endoff(sfpt_done->req()); | |
626 // Now make a pass over the debug information replacing any references | |
627 // to SafePointScalarObjectNode with the allocated object. | |
628 int start = jvms->debug_start(); | |
629 int end = jvms->debug_end(); | |
630 for (int i = start; i < end; i++) { | |
631 if (sfpt_done->in(i)->is_SafePointScalarObject()) { | |
632 SafePointScalarObjectNode* scobj = sfpt_done->in(i)->as_SafePointScalarObject(); | |
633 if (scobj->first_index() == sfpt_done->req() && | |
634 scobj->n_fields() == (uint)nfields) { | |
635 assert(scobj->alloc() == alloc, "sanity"); | |
636 sfpt_done->set_req(i, res); | |
637 } | |
638 } | |
639 } | |
640 } | |
641 #ifndef PRODUCT | |
642 if (PrintEliminateAllocations) { | |
643 if (field != NULL) { | |
644 tty->print("=== At SafePoint node %d can't find value of Field: ", | |
645 sfpt->_idx); | |
646 field->print(); | |
647 int field_idx = C->get_alias_index(field_addr_type); | |
648 tty->print(" (alias_idx=%d)", field_idx); | |
649 } else { // Array's element | |
650 tty->print("=== At SafePoint node %d can't find value of array element [%d]", | |
651 sfpt->_idx, j); | |
652 } | |
653 tty->print(", which prevents elimination of: "); | |
654 if (res == NULL) | |
655 alloc->dump(); | |
656 else | |
657 res->dump(); | |
658 } | |
659 #endif | |
660 return false; | |
661 } | |
662 sfpt->add_req(field_val); | |
663 } | |
664 JVMState *jvms = sfpt->jvms(); | |
665 jvms->set_endoff(sfpt->req()); | |
666 // Now make a pass over the debug information replacing any references | |
667 // to the allocated object with "sobj" | |
668 int start = jvms->debug_start(); | |
669 int end = jvms->debug_end(); | |
670 for (int i = start; i < end; i++) { | |
671 if (sfpt->in(i) == res) { | |
672 sfpt->set_req(i, sobj); | |
673 } | |
674 } | |
675 safepoints_done.append_if_missing(sfpt); // keep it for rollback | |
676 } | |
677 return true; | |
678 } | |
679 | |
680 // Process users of eliminated allocation. | |
681 void PhaseMacroExpand::process_users_of_allocation(AllocateNode *alloc) { | |
682 Node* res = alloc->result_cast(); | |
683 if (res != NULL) { | |
684 for (DUIterator_Last jmin, j = res->last_outs(jmin); j >= jmin; ) { | |
685 Node *use = res->last_out(j); | |
686 uint oc1 = res->outcnt(); | |
687 | |
688 if (use->is_AddP()) { | |
689 for (DUIterator_Last kmin, k = use->last_outs(kmin); k >= kmin; ) { | |
690 Node *n = use->last_out(k); | |
691 uint oc2 = use->outcnt(); | |
692 if (n->is_Store()) { | |
693 _igvn.replace_node(n, n->in(MemNode::Memory)); | |
694 } else { | |
695 assert( n->Opcode() == Op_CastP2X, "CastP2X required"); | |
696 eliminate_card_mark(n); | |
697 } | |
698 k -= (oc2 - use->outcnt()); | |
699 } | |
700 } else { | |
701 assert( !use->is_SafePoint(), "safepoint uses must have been already elimiated"); | |
702 assert( use->Opcode() == Op_CastP2X, "CastP2X required"); | |
703 eliminate_card_mark(use); | |
704 } | |
705 j -= (oc1 - res->outcnt()); | |
706 } | |
707 assert(res->outcnt() == 0, "all uses of allocated objects must be deleted"); | |
708 _igvn.remove_dead_node(res); | |
709 } | |
710 | |
711 // | |
712 // Process other users of allocation's projections | |
713 // | |
714 if (_resproj != NULL && _resproj->outcnt() != 0) { | |
715 for (DUIterator_Last jmin, j = _resproj->last_outs(jmin); j >= jmin; ) { | |
716 Node *use = _resproj->last_out(j); | |
717 uint oc1 = _resproj->outcnt(); | |
718 if (use->is_Initialize()) { | |
719 // Eliminate Initialize node. | |
720 InitializeNode *init = use->as_Initialize(); | |
721 assert(init->outcnt() <= 2, "only a control and memory projection expected"); | |
722 Node *ctrl_proj = init->proj_out(TypeFunc::Control); | |
723 if (ctrl_proj != NULL) { | |
724 assert(init->in(TypeFunc::Control) == _fallthroughcatchproj, "allocation control projection"); | |
725 _igvn.replace_node(ctrl_proj, _fallthroughcatchproj); | |
726 } | |
727 Node *mem_proj = init->proj_out(TypeFunc::Memory); | |
728 if (mem_proj != NULL) { | |
729 Node *mem = init->in(TypeFunc::Memory); | |
730 #ifdef ASSERT | |
731 if (mem->is_MergeMem()) { | |
732 assert(mem->in(TypeFunc::Memory) == _memproj_fallthrough, "allocation memory projection"); | |
733 } else { | |
734 assert(mem == _memproj_fallthrough, "allocation memory projection"); | |
735 } | |
736 #endif | |
737 _igvn.replace_node(mem_proj, mem); | |
738 } | |
739 } else if (use->is_AddP()) { | |
740 // raw memory addresses used only by the initialization | |
741 _igvn.hash_delete(use); | |
742 _igvn.subsume_node(use, C->top()); | |
743 } else { | |
744 assert(false, "only Initialize or AddP expected"); | |
745 } | |
746 j -= (oc1 - _resproj->outcnt()); | |
747 } | |
748 } | |
749 if (_fallthroughcatchproj != NULL) { | |
750 _igvn.replace_node(_fallthroughcatchproj, alloc->in(TypeFunc::Control)); | |
751 } | |
752 if (_memproj_fallthrough != NULL) { | |
753 _igvn.replace_node(_memproj_fallthrough, alloc->in(TypeFunc::Memory)); | |
754 } | |
755 if (_memproj_catchall != NULL) { | |
756 _igvn.replace_node(_memproj_catchall, C->top()); | |
757 } | |
758 if (_ioproj_fallthrough != NULL) { | |
759 _igvn.replace_node(_ioproj_fallthrough, alloc->in(TypeFunc::I_O)); | |
760 } | |
761 if (_ioproj_catchall != NULL) { | |
762 _igvn.replace_node(_ioproj_catchall, C->top()); | |
763 } | |
764 if (_catchallcatchproj != NULL) { | |
765 _igvn.replace_node(_catchallcatchproj, C->top()); | |
766 } | |
767 } | |
768 | |
769 bool PhaseMacroExpand::eliminate_allocate_node(AllocateNode *alloc) { | |
770 | |
771 if (!EliminateAllocations || !alloc->_is_scalar_replaceable) { | |
772 return false; | |
773 } | |
774 | |
775 extract_call_projections(alloc); | |
776 | |
777 GrowableArray <SafePointNode *> safepoints; | |
778 if (!can_eliminate_allocation(alloc, safepoints)) { | |
779 return false; | |
780 } | |
781 | |
782 if (!scalar_replacement(alloc, safepoints)) { | |
783 return false; | |
784 } | |
785 | |
786 process_users_of_allocation(alloc); | |
787 | |
788 #ifndef PRODUCT | |
789 if (PrintEliminateAllocations) { | |
790 if (alloc->is_AllocateArray()) | |
791 tty->print_cr("++++ Eliminated: %d AllocateArray", alloc->_idx); | |
792 else | |
793 tty->print_cr("++++ Eliminated: %d Allocate", alloc->_idx); | |
794 } | |
795 #endif | |
796 | |
797 return true; | |
182 } | 798 } |
183 | 799 |
184 | 800 |
185 //---------------------------set_eden_pointers------------------------- | 801 //---------------------------set_eden_pointers------------------------- |
186 void PhaseMacroExpand::set_eden_pointers(Node* &eden_top_adr, Node* &eden_end_adr) { | 802 void PhaseMacroExpand::set_eden_pointers(Node* &eden_top_adr, Node* &eden_end_adr) { |
283 Node* i_o = alloc->in(TypeFunc::I_O); | 899 Node* i_o = alloc->in(TypeFunc::I_O); |
284 Node* size_in_bytes = alloc->in(AllocateNode::AllocSize); | 900 Node* size_in_bytes = alloc->in(AllocateNode::AllocSize); |
285 Node* klass_node = alloc->in(AllocateNode::KlassNode); | 901 Node* klass_node = alloc->in(AllocateNode::KlassNode); |
286 Node* initial_slow_test = alloc->in(AllocateNode::InitialTest); | 902 Node* initial_slow_test = alloc->in(AllocateNode::InitialTest); |
287 | 903 |
904 // With escape analysis, the entire memory state was needed to be able to | |
905 // eliminate the allocation. Since the allocations cannot be eliminated, | |
906 // optimize it to the raw slice. | |
907 if (mem->is_MergeMem()) { | |
908 mem = mem->as_MergeMem()->memory_at(Compile::AliasIdxRaw); | |
909 } | |
910 | |
288 Node* eden_top_adr; | 911 Node* eden_top_adr; |
289 Node* eden_end_adr; | 912 Node* eden_end_adr; |
290 set_eden_pointers(eden_top_adr, eden_end_adr); | 913 set_eden_pointers(eden_top_adr, eden_end_adr); |
291 | 914 |
292 uint raw_idx = C->get_alias_index(TypeRawPtr::BOTTOM); | 915 uint raw_idx = C->get_alias_index(TypeRawPtr::BOTTOM); |
913 | 1536 |
914 | 1537 |
915 //------------------------------expand_lock_node---------------------- | 1538 //------------------------------expand_lock_node---------------------- |
916 void PhaseMacroExpand::expand_lock_node(LockNode *lock) { | 1539 void PhaseMacroExpand::expand_lock_node(LockNode *lock) { |
917 | 1540 |
918 if (eliminate_locking_node(lock)) { | |
919 return; | |
920 } | |
921 | |
922 Node* ctrl = lock->in(TypeFunc::Control); | 1541 Node* ctrl = lock->in(TypeFunc::Control); |
923 Node* mem = lock->in(TypeFunc::Memory); | 1542 Node* mem = lock->in(TypeFunc::Memory); |
924 Node* obj = lock->obj_node(); | 1543 Node* obj = lock->obj_node(); |
925 Node* box = lock->box_node(); | 1544 Node* box = lock->box_node(); |
926 Node* flock = lock->fastlock_node(); | 1545 Node* flock = lock->fastlock_node(); |
969 | 1588 |
970 } | 1589 } |
971 | 1590 |
972 //------------------------------expand_unlock_node---------------------- | 1591 //------------------------------expand_unlock_node---------------------- |
973 void PhaseMacroExpand::expand_unlock_node(UnlockNode *unlock) { | 1592 void PhaseMacroExpand::expand_unlock_node(UnlockNode *unlock) { |
974 | |
975 if (eliminate_locking_node(unlock)) { | |
976 return; | |
977 } | |
978 | 1593 |
979 Node* ctrl = unlock->in(TypeFunc::Control); | 1594 Node* ctrl = unlock->in(TypeFunc::Control); |
980 Node* mem = unlock->in(TypeFunc::Memory); | 1595 Node* mem = unlock->in(TypeFunc::Memory); |
981 Node* obj = unlock->obj_node(); | 1596 Node* obj = unlock->obj_node(); |
982 Node* box = unlock->box_node(); | 1597 Node* box = unlock->box_node(); |
1028 //------------------------------expand_macro_nodes---------------------- | 1643 //------------------------------expand_macro_nodes---------------------- |
1029 // Returns true if a failure occurred. | 1644 // Returns true if a failure occurred. |
1030 bool PhaseMacroExpand::expand_macro_nodes() { | 1645 bool PhaseMacroExpand::expand_macro_nodes() { |
1031 if (C->macro_count() == 0) | 1646 if (C->macro_count() == 0) |
1032 return false; | 1647 return false; |
1033 // Make sure expansion will not cause node limit to be exceeded. Worst case is a | 1648 // attempt to eliminate allocations |
1034 // macro node gets expanded into about 50 nodes. Allow 50% more for optimization | 1649 bool progress = true; |
1650 while (progress) { | |
1651 progress = false; | |
1652 for (int i = C->macro_count(); i > 0; i--) { | |
1653 Node * n = C->macro_node(i-1); | |
1654 bool success = false; | |
1655 debug_only(int old_macro_count = C->macro_count();); | |
1656 switch (n->class_id()) { | |
1657 case Node::Class_Allocate: | |
1658 case Node::Class_AllocateArray: | |
1659 success = eliminate_allocate_node(n->as_Allocate()); | |
1660 break; | |
1661 case Node::Class_Lock: | |
1662 case Node::Class_Unlock: | |
1663 success = eliminate_locking_node(n->as_AbstractLock()); | |
1664 break; | |
1665 default: | |
1666 assert(false, "unknown node type in macro list"); | |
1667 } | |
1668 assert(success == (C->macro_count() < old_macro_count), "elimination reduces macro count"); | |
1669 progress = progress || success; | |
1670 } | |
1671 } | |
1672 // Make sure expansion will not cause node limit to be exceeded. | |
1673 // Worst case is a macro node gets expanded into about 50 nodes. | |
1674 // Allow 50% more for optimization. | |
1035 if (C->check_node_count(C->macro_count() * 75, "out of nodes before macro expansion" ) ) | 1675 if (C->check_node_count(C->macro_count() * 75, "out of nodes before macro expansion" ) ) |
1036 return true; | 1676 return true; |
1677 | |
1037 // expand "macro" nodes | 1678 // expand "macro" nodes |
1038 // nodes are removed from the macro list as they are processed | 1679 // nodes are removed from the macro list as they are processed |
1039 while (C->macro_count() > 0) { | 1680 while (C->macro_count() > 0) { |
1040 Node * n = C->macro_node(0); | 1681 int macro_count = C->macro_count(); |
1682 Node * n = C->macro_node(macro_count-1); | |
1041 assert(n->is_macro(), "only macro nodes expected here"); | 1683 assert(n->is_macro(), "only macro nodes expected here"); |
1042 if (_igvn.type(n) == Type::TOP || n->in(0)->is_top() ) { | 1684 if (_igvn.type(n) == Type::TOP || n->in(0)->is_top() ) { |
1043 // node is unreachable, so don't try to expand it | 1685 // node is unreachable, so don't try to expand it |
1044 C->remove_macro_node(n); | 1686 C->remove_macro_node(n); |
1045 continue; | 1687 continue; |
1058 expand_unlock_node(n->as_Unlock()); | 1700 expand_unlock_node(n->as_Unlock()); |
1059 break; | 1701 break; |
1060 default: | 1702 default: |
1061 assert(false, "unknown node type in macro list"); | 1703 assert(false, "unknown node type in macro list"); |
1062 } | 1704 } |
1705 assert(C->macro_count() < macro_count, "must have deleted a node from macro list"); | |
1063 if (C->failing()) return true; | 1706 if (C->failing()) return true; |
1064 } | 1707 } |
1065 _igvn.optimize(); | 1708 _igvn.optimize(); |
1066 return false; | 1709 return false; |
1067 } | 1710 } |