Mercurial > hg > graal-compiler
comparison src/share/vm/opto/doCall.cpp @ 12816:2720ab7a0d70
Merge
author | ccheung |
---|---|
date | Fri, 04 Oct 2013 21:00:43 -0700 |
parents | 29bdcf12457c |
children | 3213ba4d3dff |
comparison
equal
deleted
inserted
replaced
12815:4212bfb33d76 | 12816:2720ab7a0d70 |
---|---|
108 // Special case the handling of certain common, profitable library | 108 // Special case the handling of certain common, profitable library |
109 // methods. If these methods are replaced with specialized code, | 109 // methods. If these methods are replaced with specialized code, |
110 // then we return it as the inlined version of the call. | 110 // then we return it as the inlined version of the call. |
111 // We do this before the strict f.p. check below because the | 111 // We do this before the strict f.p. check below because the |
112 // intrinsics handle strict f.p. correctly. | 112 // intrinsics handle strict f.p. correctly. |
113 CallGenerator* cg_intrinsic = NULL; | |
113 if (allow_inline && allow_intrinsics) { | 114 if (allow_inline && allow_intrinsics) { |
114 CallGenerator* cg = find_intrinsic(callee, call_does_dispatch); | 115 CallGenerator* cg = find_intrinsic(callee, call_does_dispatch); |
115 if (cg != NULL) { | 116 if (cg != NULL) { |
116 if (cg->is_predicted()) { | 117 if (cg->is_predicted()) { |
117 // Code without intrinsic but, hopefully, inlined. | 118 // Code without intrinsic but, hopefully, inlined. |
119 vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, false); | 120 vtable_index, call_does_dispatch, jvms, allow_inline, prof_factor, false); |
120 if (inline_cg != NULL) { | 121 if (inline_cg != NULL) { |
121 cg = CallGenerator::for_predicted_intrinsic(cg, inline_cg); | 122 cg = CallGenerator::for_predicted_intrinsic(cg, inline_cg); |
122 } | 123 } |
123 } | 124 } |
124 return cg; | 125 |
126 // If intrinsic does the virtual dispatch, we try to use the type profile | |
127 // first, and hopefully inline it as the regular virtual call below. | |
128 // We will retry the intrinsic if nothing has claimed it afterwards. | 
129 if (cg->does_virtual_dispatch()) { | |
130 cg_intrinsic = cg; | |
131 cg = NULL; | |
132 } else { | |
133 return cg; | |
134 } | |
125 } | 135 } |
126 } | 136 } |
127 | 137 |
128 // Do method handle calls. | 138 // Do method handle calls. |
129 // NOTE: This must happen before normal inlining logic below since | 139 // NOTE: This must happen before normal inlining logic below since |
264 } | 274 } |
265 } | 275 } |
266 } | 276 } |
267 } | 277 } |
268 | 278 |
279 // Nothing claimed the intrinsic, so we go with straightforward inlining | 
280 // for already discovered intrinsic. | |
281 if (allow_inline && allow_intrinsics && cg_intrinsic != NULL) { | |
282 assert(cg_intrinsic->does_virtual_dispatch(), "sanity"); | |
283 return cg_intrinsic; | |
284 } | |
285 | |
269 // There was no special inlining tactic, or it bailed out. | 286 // There was no special inlining tactic, or it bailed out. |
270 // Use a more generic tactic, like a simple call. | 287 // Use a more generic tactic, like a simple call. |
271 if (call_does_dispatch) { | 288 if (call_does_dispatch) { |
272 return CallGenerator::for_virtual_call(callee, vtable_index); | 289 return CallGenerator::for_virtual_call(callee, vtable_index); |
273 } else { | 290 } else { |