comparison src/share/vm/opto/bytecodeInfo.cpp @ 7473:d092d1b31229

8005071: Incremental inlining for JSR 292
Summary: post parse inlining driven by number of live nodes
Reviewed-by: twisti, kvn, jrose
author roland
date Sun, 23 Dec 2012 17:08:22 +0100
parents ad5dd04754ee
children 989155e2d07a 60bba1398c51
comparison
equal deleted inserted replaced
7445:cd962e15c08e 7473:d092d1b31229
44 _caller_jvms(caller_jvms), 44 _caller_jvms(caller_jvms),
45 _caller_tree((InlineTree*) caller_tree), 45 _caller_tree((InlineTree*) caller_tree),
46 _method(callee), 46 _method(callee),
47 _site_invoke_ratio(site_invoke_ratio), 47 _site_invoke_ratio(site_invoke_ratio),
48 _max_inline_level(max_inline_level), 48 _max_inline_level(max_inline_level),
49 _count_inline_bcs(method()->code_size_for_inlining()) 49 _count_inline_bcs(method()->code_size_for_inlining()),
50 _subtrees(c->comp_arena(), 2, 0, NULL)
50 { 51 {
51 NOT_PRODUCT(_count_inlines = 0;) 52 NOT_PRODUCT(_count_inlines = 0;)
52 if (_caller_jvms != NULL) { 53 if (_caller_jvms != NULL) {
53 // Keep a private copy of the caller_jvms: 54 // Keep a private copy of the caller_jvms:
54 _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms()); 55 _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
207 if (!callee_method->holder()->is_initialized()) return "method holder not initialized"; 208 if (!callee_method->holder()->is_initialized()) return "method holder not initialized";
208 if ( callee_method->is_native()) return "native method"; 209 if ( callee_method->is_native()) return "native method";
209 if ( callee_method->dont_inline()) return "don't inline by annotation"; 210 if ( callee_method->dont_inline()) return "don't inline by annotation";
210 if ( callee_method->has_unloaded_classes_in_signature()) return "unloaded signature classes"; 211 if ( callee_method->has_unloaded_classes_in_signature()) return "unloaded signature classes";
211 212
212 if (callee_method->force_inline() || callee_method->should_inline()) { 213 if (callee_method->should_inline()) {
213 // ignore heuristic controls on inlining 214 // ignore heuristic controls on inlining
214 return NULL; 215 return NULL;
215 } 216 }
216 217
217 // Now perform checks which are heuristic 218 // Now perform checks which are heuristic
218 219
219 if (callee_method->has_compiled_code() && 220 if (!callee_method->force_inline()) {
220 callee_method->instructions_size() > InlineSmallCode) { 221 if (callee_method->has_compiled_code() &&
222 callee_method->instructions_size() > InlineSmallCode) {
221 return "already compiled into a big method"; 223 return "already compiled into a big method";
224 }
222 } 225 }
223 226
224 // don't inline exception code unless the top method belongs to an 227 // don't inline exception code unless the top method belongs to an
225 // exception class 228 // exception class
226 if (caller_tree() != NULL && 229 if (caller_tree() != NULL &&
275 } 278 }
276 279
277 //-----------------------------try_to_inline----------------------------------- 280 //-----------------------------try_to_inline-----------------------------------
278 // return NULL if ok, reason for not inlining otherwise 281 // return NULL if ok, reason for not inlining otherwise
279 // Relocated from "InliningClosure::try_to_inline" 282 // Relocated from "InliningClosure::try_to_inline"
280 const char* InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile, WarmCallInfo* wci_result) { 283 const char* InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile, WarmCallInfo* wci_result, bool& should_delay) {
281
282 // Old algorithm had funny accumulating BC-size counters 284 // Old algorithm had funny accumulating BC-size counters
283 if (UseOldInlining && ClipInlining 285 if (UseOldInlining && ClipInlining
284 && (int)count_inline_bcs() >= DesiredMethodLimit) { 286 && (int)count_inline_bcs() >= DesiredMethodLimit) {
285 return "size > DesiredMethodLimit"; 287 if (!callee_method->force_inline() || !IncrementalInline) {
288 return "size > DesiredMethodLimit";
289 } else if (!C->inlining_incrementally()) {
290 should_delay = true;
291 }
286 } 292 }
287 293
288 const char *msg = NULL; 294 const char *msg = NULL;
289 msg = should_inline(callee_method, caller_method, caller_bci, profile, wci_result); 295 msg = should_inline(callee_method, caller_method, caller_bci, profile, wci_result);
290 if (msg != NULL) 296 if (msg != NULL)
301 307
302 // suppress a few checks for accessors and trivial methods 308 // suppress a few checks for accessors and trivial methods
303 if (callee_method->code_size() > MaxTrivialSize) { 309 if (callee_method->code_size() > MaxTrivialSize) {
304 310
305 // don't inline into giant methods 311 // don't inline into giant methods
306 if (C->unique() > (uint)NodeCountInliningCutoff) { 312 if (C->over_inlining_cutoff()) {
307 return "NodeCountInliningCutoff"; 313 if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
314 || !IncrementalInline) {
315 return "NodeCountInliningCutoff";
316 } else {
317 should_delay = true;
318 }
308 } 319 }
309 320
310 if ((!UseInterpreter || CompileTheWorld) && 321 if ((!UseInterpreter || CompileTheWorld) &&
311 is_init_with_ea(callee_method, caller_method, C)) { 322 is_init_with_ea(callee_method, caller_method, C)) {
312 323
321 332
322 if (!C->do_inlining() && InlineAccessors) { 333 if (!C->do_inlining() && InlineAccessors) {
323 return "not an accessor"; 334 return "not an accessor";
324 } 335 }
325 if (inline_level() > _max_inline_level) { 336 if (inline_level() > _max_inline_level) {
326 return "inlining too deep"; 337 if (!callee_method->force_inline() || !IncrementalInline) {
338 return "inlining too deep";
339 } else if (!C->inlining_incrementally()) {
340 should_delay = true;
341 }
327 } 342 }
328 343
329 // detect direct and indirect recursive inlining 344 // detect direct and indirect recursive inlining
330 if (!callee_method->is_compiled_lambda_form()) { 345 if (!callee_method->is_compiled_lambda_form()) {
331 // count the current method and the callee 346 // count the current method and the callee
346 361
347 int size = callee_method->code_size_for_inlining(); 362 int size = callee_method->code_size_for_inlining();
348 363
349 if (UseOldInlining && ClipInlining 364 if (UseOldInlining && ClipInlining
350 && (int)count_inline_bcs() + size >= DesiredMethodLimit) { 365 && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
351 return "size > DesiredMethodLimit"; 366 if (!callee_method->force_inline() || !IncrementalInline) {
367 return "size > DesiredMethodLimit";
368 } else if (!C->inlining_incrementally()) {
369 should_delay = true;
370 }
352 } 371 }
353 372
354 // ok, inline this method 373 // ok, inline this method
355 return NULL; 374 return NULL;
356 } 375 }
411 //tty->print(" bcs: %d+%d invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count()); 430 //tty->print(" bcs: %d+%d invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
412 } 431 }
413 } 432 }
414 433
415 //------------------------------ok_to_inline----------------------------------- 434 //------------------------------ok_to_inline-----------------------------------
416 WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci) { 435 WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci, bool& should_delay) {
417 assert(callee_method != NULL, "caller checks for optimized virtual!"); 436 assert(callee_method != NULL, "caller checks for optimized virtual!");
437 assert(!should_delay, "should be initialized to false");
418 #ifdef ASSERT 438 #ifdef ASSERT
419 // Make sure the incoming jvms has the same information content as me. 439 // Make sure the incoming jvms has the same information content as me.
420 // This means that we can eventually make this whole class AllStatic. 440 // This means that we can eventually make this whole class AllStatic.
421 if (jvms->caller() == NULL) { 441 if (jvms->caller() == NULL) {
422 assert(_caller_jvms == NULL, "redundant instance state"); 442 assert(_caller_jvms == NULL, "redundant instance state");
442 return NULL; 462 return NULL;
443 } 463 }
444 464
445 // Check if inlining policy says no. 465 // Check if inlining policy says no.
446 WarmCallInfo wci = *(initial_wci); 466 WarmCallInfo wci = *(initial_wci);
447 failure_msg = try_to_inline(callee_method, caller_method, caller_bci, profile, &wci); 467 failure_msg = try_to_inline(callee_method, caller_method, caller_bci, profile, &wci, should_delay);
448 if (failure_msg != NULL && C->log() != NULL) { 468 if (failure_msg != NULL && C->log() != NULL) {
449 C->log()->inline_fail(failure_msg); 469 C->log()->inline_fail(failure_msg);
450 } 470 }
451 471
452 #ifndef PRODUCT 472 #ifndef PRODUCT