Mercurial > hg > truffle
comparison src/share/vm/opto/callGenerator.cpp @ 7473:d092d1b31229
8005071: Incremental inlining for JSR 292
Summary: post parse inlining driven by number of live nodes.
Reviewed-by: twisti, kvn, jrose
author | roland |
---|---|
date | Sun, 23 Dec 2012 17:08:22 +0100 |
parents | ad5dd04754ee |
children | 5698813d45eb |
comparison
equal
deleted
inserted
replaced
7445:cd962e15c08e | 7473:d092d1b31229 |
---|---|
260 return new VirtualCallGenerator(m, vtable_index); | 260 return new VirtualCallGenerator(m, vtable_index); |
261 } | 261 } |
262 | 262 |
263 // Allow inlining decisions to be delayed | 263 // Allow inlining decisions to be delayed |
264 class LateInlineCallGenerator : public DirectCallGenerator { | 264 class LateInlineCallGenerator : public DirectCallGenerator { |
265 protected: | |
265 CallGenerator* _inline_cg; | 266 CallGenerator* _inline_cg; |
267 | |
268 virtual bool do_late_inline_check(JVMState* jvms) { return true; } | |
266 | 269 |
267 public: | 270 public: |
268 LateInlineCallGenerator(ciMethod* method, CallGenerator* inline_cg) : | 271 LateInlineCallGenerator(ciMethod* method, CallGenerator* inline_cg) : |
269 DirectCallGenerator(method, true), _inline_cg(inline_cg) {} | 272 DirectCallGenerator(method, true), _inline_cg(inline_cg) {} |
270 | 273 |
277 Compile *C = Compile::current(); | 280 Compile *C = Compile::current(); |
278 C->print_inlining_skip(this); | 281 C->print_inlining_skip(this); |
279 | 282 |
280 // Record that this call site should be revisited once the main | 283 // Record that this call site should be revisited once the main |
281 // parse is finished. | 284 // parse is finished. |
282 Compile::current()->add_late_inline(this); | 285 if (!is_mh_late_inline()) { |
286 C->add_late_inline(this); | |
287 } | |
283 | 288 |
284 // Emit the CallStaticJava and request separate projections so | 289 // Emit the CallStaticJava and request separate projections so |
285 // that the late inlining logic can distinguish between fall | 290 // that the late inlining logic can distinguish between fall |
286 // through and exceptional uses of the memory and io projections | 291 // through and exceptional uses of the memory and io projections |
287 // as is done for allocations and macro expansion. | 292 // as is done for allocations and macro expansion. |
288 return DirectCallGenerator::generate(jvms); | 293 return DirectCallGenerator::generate(jvms); |
289 } | 294 } |
295 | |
296 virtual void print_inlining_late(const char* msg) { | |
297 CallNode* call = call_node(); | |
298 Compile* C = Compile::current(); | |
299 C->print_inlining_insert(this); | |
300 C->print_inlining(method(), call->jvms()->depth()-1, call->jvms()->bci(), msg); | |
301 } | |
302 | |
290 }; | 303 }; |
291 | |
292 | 304 |
293 void LateInlineCallGenerator::do_late_inline() { | 305 void LateInlineCallGenerator::do_late_inline() { |
294 // Can't inline it | 306 // Can't inline it |
295 if (call_node() == NULL || call_node()->outcnt() == 0 || | 307 if (call_node() == NULL || call_node()->outcnt() == 0 || |
296 call_node()->in(0) == NULL || call_node()->in(0)->is_top()) | 308 call_node()->in(0) == NULL || call_node()->in(0)->is_top()) |
297 return; | 309 return; |
310 | |
311 for (int i1 = 0; i1 < method()->arg_size(); i1++) { | |
312 if (call_node()->in(TypeFunc::Parms + i1)->is_top()) { | |
313 assert(Compile::current()->inlining_incrementally(), "shouldn't happen during parsing"); | |
314 return; | |
315 } | |
316 } | |
317 | |
318 if (call_node()->in(TypeFunc::Memory)->is_top()) { | |
319 assert(Compile::current()->inlining_incrementally(), "shouldn't happen during parsing"); | |
320 return; | |
321 } | |
298 | 322 |
299 CallStaticJavaNode* call = call_node(); | 323 CallStaticJavaNode* call = call_node(); |
300 | 324 |
301 // Make a clone of the JVMState that is appropriate to use for driving a parse | 325 // Make a clone of the JVMState that is appropriate to use for driving a parse |
302 Compile* C = Compile::current(); | 326 Compile* C = Compile::current(); |
322 for (int i1 = 0; i1 < nargs; i1++) { | 346 for (int i1 = 0; i1 < nargs; i1++) { |
323 map->set_req(i1 + jvms->argoff(), call->in(TypeFunc::Parms + i1)); | 347 map->set_req(i1 + jvms->argoff(), call->in(TypeFunc::Parms + i1)); |
324 } | 348 } |
325 } | 349 } |
326 | 350 |
351 if (!do_late_inline_check(jvms)) { | |
352 map->disconnect_inputs(NULL, C); | |
353 return; | |
354 } | |
355 | |
327 C->print_inlining_insert(this); | 356 C->print_inlining_insert(this); |
328 | 357 |
329 CompileLog* log = C->log(); | 358 CompileLog* log = C->log(); |
330 if (log != NULL) { | 359 if (log != NULL) { |
331 log->head("late_inline method='%d'", log->identify(method())); | 360 log->head("late_inline method='%d'", log->identify(method())); |
358 int result_size = method()->return_type()->size(); | 387 int result_size = method()->return_type()->size(); |
359 if (result_size != 0 && !kit.stopped()) { | 388 if (result_size != 0 && !kit.stopped()) { |
360 result = (result_size == 1) ? kit.pop() : kit.pop_pair(); | 389 result = (result_size == 1) ? kit.pop() : kit.pop_pair(); |
361 } | 390 } |
362 | 391 |
392 C->set_has_loops(C->has_loops() || _inline_cg->method()->has_loops()); | |
393 C->env()->notice_inlined_method(_inline_cg->method()); | |
394 C->set_inlining_progress(true); | |
395 | |
363 kit.replace_call(call, result); | 396 kit.replace_call(call, result); |
364 } | 397 } |
365 | 398 |
366 | 399 |
367 CallGenerator* CallGenerator::for_late_inline(ciMethod* method, CallGenerator* inline_cg) { | 400 CallGenerator* CallGenerator::for_late_inline(ciMethod* method, CallGenerator* inline_cg) { |
368 return new LateInlineCallGenerator(method, inline_cg); | 401 return new LateInlineCallGenerator(method, inline_cg); |
402 } | |
403 | |
404 class LateInlineMHCallGenerator : public LateInlineCallGenerator { | |
405 ciMethod* _caller; | |
406 int _attempt; | |
407 bool _input_not_const; | |
408 | |
409 virtual bool do_late_inline_check(JVMState* jvms); | |
410 virtual bool already_attempted() const { return _attempt > 0; } | |
411 | |
412 public: | |
413 LateInlineMHCallGenerator(ciMethod* caller, ciMethod* callee, bool input_not_const) : | |
414 LateInlineCallGenerator(callee, NULL), _caller(caller), _attempt(0), _input_not_const(input_not_const) {} | |
415 | |
416 virtual bool is_mh_late_inline() const { return true; } | |
417 | |
418 virtual JVMState* generate(JVMState* jvms) { | |
419 JVMState* new_jvms = LateInlineCallGenerator::generate(jvms); | |
420 if (_input_not_const) { | |
421 // inlining won't be possible so no need to enqueue right now. | |
422 call_node()->set_generator(this); | |
423 } else { | |
424 Compile::current()->add_late_inline(this); | |
425 } | |
426 return new_jvms; | |
427 } | |
428 | |
429 virtual void print_inlining_late(const char* msg) { | |
430 if (!_input_not_const) return; | |
431 LateInlineCallGenerator::print_inlining_late(msg); | |
432 } | |
433 }; | |
434 | |
435 bool LateInlineMHCallGenerator::do_late_inline_check(JVMState* jvms) { | |
436 | |
437 CallGenerator* cg = for_method_handle_inline(jvms, _caller, method(), _input_not_const); | |
438 | |
439 if (!_input_not_const) { | |
440 _attempt++; | |
441 } | |
442 | |
443 if (cg != NULL) { | |
444 assert(!cg->is_late_inline() && cg->is_inline(), "we're doing late inlining"); | |
445 _inline_cg = cg; | |
446 Compile::current()->dec_number_of_mh_late_inlines(); | |
447 return true; | |
448 } | |
449 | |
450 call_node()->set_generator(this); | |
451 return false; | |
452 } | |
453 | |
454 CallGenerator* CallGenerator::for_mh_late_inline(ciMethod* caller, ciMethod* callee, bool input_not_const) { | |
455 Compile::current()->inc_number_of_mh_late_inlines(); | |
456 CallGenerator* cg = new LateInlineMHCallGenerator(caller, callee, input_not_const); | |
457 return cg; | |
458 } | |
459 | |
460 class LateInlineStringCallGenerator : public LateInlineCallGenerator { | |
461 | |
462 public: | |
463 LateInlineStringCallGenerator(ciMethod* method, CallGenerator* inline_cg) : | |
464 LateInlineCallGenerator(method, inline_cg) {} | |
465 | |
466 virtual JVMState* generate(JVMState* jvms) { | |
467 Compile *C = Compile::current(); | |
468 C->print_inlining_skip(this); | |
469 | |
470 C->add_string_late_inline(this); | |
471 | |
472 JVMState* new_jvms = DirectCallGenerator::generate(jvms); | |
473 return new_jvms; | |
474 } | |
475 }; | |
476 | |
477 CallGenerator* CallGenerator::for_string_late_inline(ciMethod* method, CallGenerator* inline_cg) { | |
478 return new LateInlineStringCallGenerator(method, inline_cg); | |
369 } | 479 } |
370 | 480 |
371 | 481 |
372 //---------------------------WarmCallGenerator-------------------------------- | 482 //---------------------------WarmCallGenerator-------------------------------- |
373 // Internal class which handles initial deferral of inlining decisions. | 483 // Internal class which handles initial deferral of inlining decisions. |
584 } | 694 } |
585 return kit.transfer_exceptions_into_jvms(); | 695 return kit.transfer_exceptions_into_jvms(); |
586 } | 696 } |
587 | 697 |
588 | 698 |
589 CallGenerator* CallGenerator::for_method_handle_call(JVMState* jvms, ciMethod* caller, ciMethod* callee) { | 699 CallGenerator* CallGenerator::for_method_handle_call(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool delayed_forbidden) { |
590 assert(callee->is_method_handle_intrinsic() || | 700 assert(callee->is_method_handle_intrinsic() || |
591 callee->is_compiled_lambda_form(), "for_method_handle_call mismatch"); | 701 callee->is_compiled_lambda_form(), "for_method_handle_call mismatch"); |
592 CallGenerator* cg = CallGenerator::for_method_handle_inline(jvms, caller, callee); | 702 bool input_not_const; |
593 if (cg != NULL) | 703 CallGenerator* cg = CallGenerator::for_method_handle_inline(jvms, caller, callee, input_not_const); |
594 return cg; | 704 Compile* C = Compile::current(); |
595 return CallGenerator::for_direct_call(callee); | 705 if (cg != NULL) { |
596 } | 706 if (!delayed_forbidden && AlwaysIncrementalInline) { |
597 | 707 return CallGenerator::for_late_inline(callee, cg); |
598 CallGenerator* CallGenerator::for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee) { | 708 } else { |
709 return cg; | |
710 } | |
711 } | |
712 int bci = jvms->bci(); | |
713 ciCallProfile profile = caller->call_profile_at_bci(bci); | |
714 int call_site_count = caller->scale_count(profile.count()); | |
715 | |
716 if (IncrementalInline && call_site_count > 0 && | |
717 (input_not_const || !C->inlining_incrementally() || C->over_inlining_cutoff())) { | |
718 return CallGenerator::for_mh_late_inline(caller, callee, input_not_const); | |
719 } else { | |
720 return CallGenerator::for_direct_call(callee); | |
721 } | |
722 } | |
723 | |
724 CallGenerator* CallGenerator::for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool& input_not_const) { | |
599 GraphKit kit(jvms); | 725 GraphKit kit(jvms); |
600 PhaseGVN& gvn = kit.gvn(); | 726 PhaseGVN& gvn = kit.gvn(); |
601 Compile* C = kit.C; | 727 Compile* C = kit.C; |
602 vmIntrinsics::ID iid = callee->intrinsic_id(); | 728 vmIntrinsics::ID iid = callee->intrinsic_id(); |
729 input_not_const = true; | |
603 switch (iid) { | 730 switch (iid) { |
604 case vmIntrinsics::_invokeBasic: | 731 case vmIntrinsics::_invokeBasic: |
605 { | 732 { |
606 // Get MethodHandle receiver: | 733 // Get MethodHandle receiver: |
607 Node* receiver = kit.argument(0); | 734 Node* receiver = kit.argument(0); |
608 if (receiver->Opcode() == Op_ConP) { | 735 if (receiver->Opcode() == Op_ConP) { |
736 input_not_const = false; | |
609 const TypeOopPtr* oop_ptr = receiver->bottom_type()->is_oopptr(); | 737 const TypeOopPtr* oop_ptr = receiver->bottom_type()->is_oopptr(); |
610 ciMethod* target = oop_ptr->const_oop()->as_method_handle()->get_vmtarget(); | 738 ciMethod* target = oop_ptr->const_oop()->as_method_handle()->get_vmtarget(); |
611 guarantee(!target->is_method_handle_intrinsic(), "should not happen"); // XXX remove | 739 guarantee(!target->is_method_handle_intrinsic(), "should not happen"); // XXX remove |
612 const int vtable_index = Method::invalid_vtable_index; | 740 const int vtable_index = Method::invalid_vtable_index; |
613 CallGenerator* cg = C->call_generator(target, vtable_index, false, jvms, true, PROB_ALWAYS); | 741 CallGenerator* cg = C->call_generator(target, vtable_index, false, jvms, true, PROB_ALWAYS, true, true); |
742 assert (!cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here"); | |
614 if (cg != NULL && cg->is_inline()) | 743 if (cg != NULL && cg->is_inline()) |
615 return cg; | 744 return cg; |
616 } else { | |
617 if (PrintInlining) C->print_inlining(callee, jvms->depth() - 1, jvms->bci(), "receiver not constant"); | |
618 } | 745 } |
619 } | 746 } |
620 break; | 747 break; |
621 | 748 |
622 case vmIntrinsics::_linkToVirtual: | 749 case vmIntrinsics::_linkToVirtual: |
625 case vmIntrinsics::_linkToInterface: | 752 case vmIntrinsics::_linkToInterface: |
626 { | 753 { |
627 // Get MemberName argument: | 754 // Get MemberName argument: |
628 Node* member_name = kit.argument(callee->arg_size() - 1); | 755 Node* member_name = kit.argument(callee->arg_size() - 1); |
629 if (member_name->Opcode() == Op_ConP) { | 756 if (member_name->Opcode() == Op_ConP) { |
757 input_not_const = false; | |
630 const TypeOopPtr* oop_ptr = member_name->bottom_type()->is_oopptr(); | 758 const TypeOopPtr* oop_ptr = member_name->bottom_type()->is_oopptr(); |
631 ciMethod* target = oop_ptr->const_oop()->as_member_name()->get_vmtarget(); | 759 ciMethod* target = oop_ptr->const_oop()->as_member_name()->get_vmtarget(); |
632 | 760 |
633 // In lambda forms we erase signature types to avoid resolving issues | 761 // In lambda forms we erase signature types to avoid resolving issues |
634 // involving class loaders. When we optimize a method handle invoke | 762 // involving class loaders. When we optimize a method handle invoke |
659 } | 787 } |
660 } | 788 } |
661 } | 789 } |
662 const int vtable_index = Method::invalid_vtable_index; | 790 const int vtable_index = Method::invalid_vtable_index; |
663 const bool call_is_virtual = target->is_abstract(); // FIXME workaround | 791 const bool call_is_virtual = target->is_abstract(); // FIXME workaround |
664 CallGenerator* cg = C->call_generator(target, vtable_index, call_is_virtual, jvms, true, PROB_ALWAYS); | 792 CallGenerator* cg = C->call_generator(target, vtable_index, call_is_virtual, jvms, true, PROB_ALWAYS, true, true); |
793 assert (!cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here"); | |
665 if (cg != NULL && cg->is_inline()) | 794 if (cg != NULL && cg->is_inline()) |
666 return cg; | 795 return cg; |
667 } | 796 } |
668 } | 797 } |
669 break; | 798 break; |