comparison src/share/vm/opto/bytecodeInfo.cpp @ 6266:1d7922586cf6

7023639: JSR 292 method handle invocation needs a fast path for compiled code
6984705: JSR 292 method handle creation should not go through JNI
Summary: remove assembly code for JDK 7 chained method handles
Reviewed-by: jrose, twisti, kvn, mhaupt
Contributed-by: John Rose <john.r.rose@oracle.com>, Christian Thalinger <christian.thalinger@oracle.com>, Michael Haupt <michael.haupt@oracle.com>
author twisti
date Tue, 24 Jul 2012 10:51:00 -0700
parents 0f4014d7731b
children 957c266d8bc5 b9a9ed0f8eeb c3e799c37717
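
The bytecodeInfo.cpp side of this change removes the old blanket rule that always inlined MethodHandle methods and generated MethodHandle adapters. In its place, per-method hints surfaced through ciMethod drive the policy: dont_inline() is now a hard rejection in both filter paths, force_inline() (together with should_inline()) bypasses the heuristic checks, and compiled lambda forms are exempted from the recursive-inlining check and discounted from the inline depth. A minimal stand-alone C++ sketch of that filter ordering follows; CalleeInfo, its fields, and the max_trivial_size threshold are hypothetical stand-ins for illustration, not HotSpot's ciMethod/InlineTree API.

// Hypothetical model of the new filter ordering; not HotSpot code.
#include <cstdio>

struct CalleeInfo {
  bool is_abstract;
  bool holder_initialized;
  bool is_native;
  bool dont_inline;                 // e.g. set from a @DontInline-style hint
  bool force_inline;                // e.g. set from a @ForceInline-style hint
  bool unloaded_signature_classes;
  int  code_size;
};

// Mirrors the ordering in the new should_not_inline(): correctness checks first
// (including the new dont_inline rejection), then the force_inline override,
// then size heuristics. Returns NULL when inlining is acceptable.
static const char* should_not_inline(const CalleeInfo& m, int max_trivial_size) {
  if (m.is_abstract)                  return "abstract method";
  if (!m.holder_initialized)          return "method holder not initialized";
  if (m.is_native)                    return "native method";
  if (m.dont_inline)                  return "don't inline by annotation";
  if (m.unloaded_signature_classes)   return "unloaded signature classes";

  if (m.force_inline)                 return NULL;   // ignore heuristic controls

  // stand-in for the MaxTrivialSize/InlineSmallCode style heuristics
  if (m.code_size > max_trivial_size) return "heuristic: callee too large";
  return NULL;
}

int main() {
  CalleeInfo handle_invoker = { false, true, false, false, /*force_inline=*/true,  false, 500 };
  CalleeInfo opted_out      = { false, true, false, /*dont_inline=*/true, false, false, 10 };
  const char* msg = should_not_inline(handle_invoker, 6);
  std::printf("force-inlined callee: %s\n", msg ? msg : "inline");
  msg = should_not_inline(opted_out, 6);
  std::printf("dont-inline callee:   %s\n", msg ? msg : "inline");
  return 0;
}

For example, a callee carrying a dont-inline hint is rejected before any size heuristic runs, while a force-inline callee skips the size check entirely; that is the ordering the rewritten should_not_inline() below establishes.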
comparison of 6241:aba91a731143 (old) with 6266:1d7922586cf6 (new)
@@ -91,11 +91,11 @@
            caller_method != C->method() &&
            caller_method->holder()->is_subclass_of(callee_method->holder()))
          );
 }
 
-// positive filter: should send be inlined? returns NULL, if yes, or rejection msg
+// positive filter: should callee be inlined? returns NULL, if yes, or rejection msg
 const char* InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile, WarmCallInfo* wci_result) const {
   // Allows targeted inlining
   if(callee_method->should_inline()) {
     *wci_result = *(WarmCallInfo::always_hot());
     if (PrintInlining && Verbose) {
@@ -129,37 +129,10 @@
   int max_inline_size = default_max_inline_size;
 
   int call_site_count = method()->scale_count(profile.count());
   int invoke_count = method()->interpreter_invocation_count();
 
-  // Bytecoded method handle adapters do not have interpreter
-  // profiling data but only made up MDO data. Get the counter from
-  // there.
-  if (caller_method->is_method_handle_adapter()) {
-    assert(method()->method_data_or_null(), "must have an MDO");
-    ciMethodData* mdo = method()->method_data();
-    ciProfileData* mha_profile = mdo->bci_to_data(caller_bci);
-    assert(mha_profile, "must exist");
-    CounterData* cd = mha_profile->as_CounterData();
-    invoke_count = cd->count();
-    if (invoke_count == 0) {
-      return "method handle not reached";
-    }
-
-    if (_caller_jvms != NULL && _caller_jvms->method() != NULL &&
-        _caller_jvms->method()->method_data() != NULL &&
-        !_caller_jvms->method()->method_data()->is_empty()) {
-      ciMethodData* mdo = _caller_jvms->method()->method_data();
-      ciProfileData* mha_profile = mdo->bci_to_data(_caller_jvms->bci());
-      assert(mha_profile, "must exist");
-      CounterData* cd = mha_profile->as_CounterData();
-      call_site_count = cd->count();
-    } else {
-      call_site_count = invoke_count; // use the same value
-    }
-  }
-
   assert(invoke_count != 0, "require invocation count greater than zero");
   int freq = call_site_count / invoke_count;
 
   // bump the max size if the call is frequent
   if ((freq >= InlineFrequencyRatio) ||
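
The hunk above deletes the special case that pulled invoke_count and call_site_count out of made-up MDO data for the old bytecoded method handle adapters; what survives is the plain frequency heuristic built on freq = call_site_count / invoke_count. A small self-contained sketch of that surviving idea is below; adjusted_max_inline_size, its parameters, and the 4x growth factor are illustrative stand-ins, not C2's actual InlineFrequencyRatio/FreqInlineSize tuning.

// Illustrative only: names and the growth factor are stand-ins for the
// frequency-based size bump kept by this change.
#include <algorithm>
#include <cstdio>

static int adjusted_max_inline_size(int default_max_inline_size,
                                    int call_site_count, int invoke_count,
                                    int inline_frequency_ratio) {
  int max_inline_size = default_max_inline_size;
  if (invoke_count == 0) return max_inline_size;   // nothing profiled yet
  int freq = call_site_count / invoke_count;       // how hot this call site is
  if (freq >= inline_frequency_ratio) {
    // frequent call site: tolerate a bigger callee (factor is illustrative)
    max_inline_size = std::max(max_inline_size, default_max_inline_size * 4);
  }
  return max_inline_size;
}

int main() {
  std::printf("cold site: %d bytes\n", adjusted_max_inline_size(35, 10,   100, 20));
  std::printf("hot site:  %d bytes\n", adjusted_max_inline_size(35, 5000, 100, 20));
  return 0;
}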
@@ -187,19 +160,20 @@
   }
   return NULL;
 }
 
 
-// negative filter: should send NOT be inlined? returns NULL, ok to inline, or rejection msg
+// negative filter: should callee NOT be inlined? returns NULL, ok to inline, or rejection msg
 const char* InlineTree::should_not_inline(ciMethod *callee_method, ciMethod* caller_method, WarmCallInfo* wci_result) const {
   // negative filter: should send NOT be inlined? returns NULL (--> inline) or rejection msg
   if (!UseOldInlining) {
     const char* fail = NULL;
-    if (callee_method->is_abstract()) fail = "abstract method";
+    if ( callee_method->is_abstract()) fail = "abstract method";
     // note: we allow ik->is_abstract()
     if (!callee_method->holder()->is_initialized()) fail = "method holder not initialized";
-    if (callee_method->is_native()) fail = "native method";
+    if ( callee_method->is_native()) fail = "native method";
+    if ( callee_method->dont_inline()) fail = "don't inline by annotation";
 
     if (fail) {
       *wci_result = *(WarmCallInfo::always_cold());
       return fail;
     }
@@ -215,38 +189,38 @@
       if (!top_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
         wci_result->set_profit(wci_result->profit() * 0.1);
       }
     }
 
-    if (callee_method->has_compiled_code() && callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode) {
+    if (callee_method->has_compiled_code() &&
+        callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode) {
       wci_result->set_profit(wci_result->profit() * 0.1);
       // %%% adjust wci_result->size()?
     }
 
     return NULL;
   }
 
-  // Always inline MethodHandle methods and generated MethodHandle adapters.
-  if (callee_method->is_method_handle_invoke() || callee_method->is_method_handle_adapter())
-    return NULL;
-
   // First check all inlining restrictions which are required for correctness
-  if (callee_method->is_abstract()) return "abstract method";
+  if ( callee_method->is_abstract()) return "abstract method";
   // note: we allow ik->is_abstract()
   if (!callee_method->holder()->is_initialized()) return "method holder not initialized";
-  if (callee_method->is_native()) return "native method";
-  if (callee_method->has_unloaded_classes_in_signature()) return "unloaded signature classes";
+  if ( callee_method->is_native()) return "native method";
+  if ( callee_method->dont_inline()) return "don't inline by annotation";
+  if ( callee_method->has_unloaded_classes_in_signature()) return "unloaded signature classes";
 
-  if (callee_method->should_inline()) {
+  if (callee_method->force_inline() || callee_method->should_inline()) {
     // ignore heuristic controls on inlining
     return NULL;
   }
 
   // Now perform checks which are heuristic
 
-  if( callee_method->has_compiled_code() && callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode )
+  if (callee_method->has_compiled_code() &&
+      callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode) {
     return "already compiled into a big method";
+  }
 
   // don't inline exception code unless the top method belongs to an
   // exception class
   if (caller_tree() != NULL &&
       callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
268 return "profiling method"; 242 return "profiling method";
269 } 243 }
270 } 244 }
271 245
272 // use frequency-based objections only for non-trivial methods 246 // use frequency-based objections only for non-trivial methods
273 if (callee_method->code_size_for_inlining() <= MaxTrivialSize) return NULL; 247 if (callee_method->code_size() <= MaxTrivialSize) return NULL;
274 248
275 // don't use counts with -Xcomp or CTW 249 // don't use counts with -Xcomp or CTW
276 if (UseInterpreter && !CompileTheWorld) { 250 if (UseInterpreter && !CompileTheWorld) {
277 251
278 if (!callee_method->has_compiled_code() && 252 if (!callee_method->has_compiled_code() &&
@@ -317,11 +291,11 @@
     // accessor methods are not subject to any of the following limits.
     return NULL;
   }
 
   // suppress a few checks for accessors and trivial methods
-  if (callee_method->code_size_for_inlining() > MaxTrivialSize) {
+  if (callee_method->code_size() > MaxTrivialSize) {
 
     // don't inline into giant methods
     if (C->unique() > (uint)NodeCountInliningCutoff) {
       return "NodeCountInliningCutoff";
     }
@@ -344,11 +318,11 @@
   if (inline_level() > _max_inline_level) {
     return "inlining too deep";
   }
 
   // detect direct and indirect recursive inlining
-  {
+  if (!callee_method->is_compiled_lambda_form()) {
     // count the current method and the callee
     int inline_level = (method() == callee_method) ? 1 : 0;
     if (inline_level > MaxRecursiveInlineLevel)
       return "recursively inlining too deep";
     // count callers of current method and callee
@@ -410,10 +384,11 @@
 
 //------------------------------check_can_parse--------------------------------
 const char* InlineTree::check_can_parse(ciMethod* callee) {
   // Certain methods cannot be parsed at all:
   if ( callee->is_native()) return "native method";
+  if ( callee->is_abstract()) return "abstract method";
   if (!callee->can_be_compiled()) return "not compilable (disabled)";
   if (!callee->has_balanced_monitors()) return "not compilable (unbalanced monitors)";
   if ( callee->get_flow_analysis()->failing()) return "not compilable (flow analysis failed)";
   return NULL;
 }
@@ -424,11 +399,11 @@
   CompileTask::print_inlining(callee_method, inline_level(), caller_bci, failure_msg ? failure_msg : "inline");
   if (callee_method == NULL) tty->print(" callee not monotonic or profiled");
   if (Verbose && callee_method) {
     const InlineTree *top = this;
     while( top->caller_tree() != NULL ) { top = top->caller_tree(); }
-    tty->print(" bcs: %d+%d invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
+    //tty->print(" bcs: %d+%d invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
   }
 }
 
 //------------------------------ok_to_inline-----------------------------------
 WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci) {
@@ -447,14 +422,11 @@
   int caller_bci = jvms->bci();
   ciMethod *caller_method = jvms->method();
 
   // Do some initial checks.
   if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
-    if (PrintInlining) {
-      failure_msg = "failed_initial_checks";
-      print_inlining(callee_method, caller_bci, failure_msg);
-    }
+    if (PrintInlining) print_inlining(callee_method, caller_bci, "failed initial checks");
     return NULL;
   }
 
   // Do some parse checks.
   failure_msg = check_can_parse(callee_method);
@@ -537,13 +509,14 @@
   if (old_ilt != NULL) {
     return old_ilt;
   }
   int max_inline_level_adjust = 0;
   if (caller_jvms->method() != NULL) {
-    if (caller_jvms->method()->is_method_handle_adapter())
+    if (caller_jvms->method()->is_compiled_lambda_form())
       max_inline_level_adjust += 1; // don't count actions in MH or indy adapter frames
-    else if (callee_method->is_method_handle_invoke()) {
+    else if (callee_method->is_method_handle_intrinsic() ||
+             callee_method->is_compiled_lambda_form()) {
       max_inline_level_adjust += 1; // don't count method handle calls from java.lang.invoke implem
     }
     if (max_inline_level_adjust != 0 && PrintInlining && (Verbose || WizardMode)) {
       CompileTask::print_inline_indent(inline_level());
       tty->print_cr(" \\-> discounting inline depth");
@@ -588,25 +561,25 @@
 
 //-------------------------find_subtree_from_root-----------------------------
 // Given a jvms, which determines a call chain from the root method,
 // find the corresponding inline tree.
 // Note: This method will be removed or replaced as InlineTree goes away.
-InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee, bool create_if_not_found) {
+InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee) {
   InlineTree* iltp = root;
   uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
   for (uint d = 1; d <= depth; d++) {
     JVMState* jvmsp = jvms->of_depth(d);
     // Select the corresponding subtree for this bci.
     assert(jvmsp->method() == iltp->method(), "tree still in sync");
     ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
     InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
-    if (!sub) {
-      if (create_if_not_found && d == depth) {
-        return iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
+    if (sub == NULL) {
+      if (d == depth) {
+        sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
       }
-      assert(sub != NULL, "should be a sub-ilt here");
-      return NULL;
+      guarantee(sub != NULL, "should be a sub-ilt here");
+      return sub;
     }
     iltp = sub;
   }
   return iltp;
 }
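
The final hunk simplifies find_subtree_from_root(): the create_if_not_found parameter is gone, a missing subtree is built unconditionally when the walk reaches the leaf depth, and the result is now guaranteed non-NULL (the assert is promoted to a guarantee). A rough stand-alone sketch of that contract follows; the Node type and the find_or_build helper are hypothetical stand-ins, not the InlineTree/JVMState API.

// Hypothetical sketch of the simplified walk-and-build-at-leaf contract.
#include <cassert>
#include <cstddef>
#include <map>
#include <vector>

struct Node {
  std::map<int, Node*> children;   // keyed by call-site bci
  Node* child_at(int bci) {
    auto it = children.find(bci);
    return it == children.end() ? nullptr : it->second;
  }
  Node* build_child(int bci) { return children[bci] = new Node(); }
};

// bcis describes the call chain from the root, one call-site bci per depth.
static Node* find_or_build(Node* root, const std::vector<int>& bcis) {
  Node* iltp = root;
  for (std::size_t d = 0; d < bcis.size(); d++) {
    Node* sub = iltp->child_at(bcis[d]);
    if (sub == nullptr) {
      // Only the deepest level may be missing; build it instead of bailing out.
      assert(d == bcis.size() - 1 && "intermediate levels must already exist");
      sub = iltp->build_child(bcis[d]);
    }
    iltp = sub;
  }
  return iltp;  // never NULL, matching the new guarantee(sub != NULL, ...)
}

int main() {
  Node root;
  Node* parent = find_or_build(&root, {12});        // builds the level-1 subtree
  Node* leaf   = find_or_build(&root, {12, 34});    // builds only the missing leaf
  return (parent != nullptr && leaf != nullptr) ? 0 : 1;
}

The design point is that callers no longer need a NULL check or a create flag: the walk either finds the subtree or builds it at the leaf, and anything else is treated as a hard failure.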