comparison src/share/vm/interpreter/rewriter.cpp @ 3464:be4ca325525a

Merge.
author Thomas Wuerthinger <thomas@wuerthinger.net>
date Wed, 27 Jul 2011 17:32:44 -0700
parents 75a99b4f1c98 2d4b2b833d29
children 04b9a2566eec
comparison
equal deleted inserted replaced
3239:7c4b4daac19b 3464:be4ca325525a
61 "all cp cache indexes fit in a u2"); 61 "all cp cache indexes fit in a u2");
62 62
63 _have_invoke_dynamic = ((tag_mask & (1 << JVM_CONSTANT_InvokeDynamic)) != 0); 63 _have_invoke_dynamic = ((tag_mask & (1 << JVM_CONSTANT_InvokeDynamic)) != 0);
64 } 64 }
65 65
66 // Unrewrite the bytecodes if an error occurs.
67 void Rewriter::restore_bytecodes() {
68 int len = _methods->length();
69
70 for (int i = len-1; i >= 0; i--) {
71 methodOop method = (methodOop)_methods->obj_at(i);
72 scan_method(method, true);
73 }
74 }
66 75
67 // Creates a constant pool cache given a CPC map 76 // Creates a constant pool cache given a CPC map
68 void Rewriter::make_constant_pool_cache(TRAPS) { 77 void Rewriter::make_constant_pool_cache(TRAPS) {
69 const int length = _cp_cache_map.length(); 78 const int length = _cp_cache_map.length();
70 constantPoolCacheOop cache = 79 constantPoolCacheOop cache =
131 } 140 }
132 } 141 }
133 142
134 143
135 // Rewrite a classfile-order CP index into a native-order CPC index. 144 // Rewrite a classfile-order CP index into a native-order CPC index.
136 void Rewriter::rewrite_member_reference(address bcp, int offset) { 145 void Rewriter::rewrite_member_reference(address bcp, int offset, bool reverse) {
137 address p = bcp + offset; 146 address p = bcp + offset;
138 int cp_index = Bytes::get_Java_u2(p); 147 if (!reverse) {
139 int cache_index = cp_entry_to_cp_cache(cp_index); 148 int cp_index = Bytes::get_Java_u2(p);
140 Bytes::put_native_u2(p, cache_index); 149 int cache_index = cp_entry_to_cp_cache(cp_index);
141 } 150 Bytes::put_native_u2(p, cache_index);
142 151 } else {
143 152 int cache_index = Bytes::get_native_u2(p);
144 void Rewriter::rewrite_invokedynamic(address bcp, int offset) { 153 int pool_index = cp_cache_entry_pool_index(cache_index);
154 Bytes::put_Java_u2(p, pool_index);
155 }
156 }
157
158
159 void Rewriter::rewrite_invokedynamic(address bcp, int offset, bool reverse) {
145 address p = bcp + offset; 160 address p = bcp + offset;
146 assert(p[-1] == Bytecodes::_invokedynamic, ""); 161 assert(p[-1] == Bytecodes::_invokedynamic, "not invokedynamic bytecode");
147 int cp_index = Bytes::get_Java_u2(p); 162 if (!reverse) {
148 int cpc = maybe_add_cp_cache_entry(cp_index); // add lazily 163 int cp_index = Bytes::get_Java_u2(p);
149 int cpc2 = add_secondary_cp_cache_entry(cpc); 164 int cpc = maybe_add_cp_cache_entry(cp_index); // add lazily
150 165 int cpc2 = add_secondary_cp_cache_entry(cpc);
151 // Replace the trailing four bytes with a CPC index for the dynamic 166
152 // call site. Unlike other CPC entries, there is one per bytecode, 167 // Replace the trailing four bytes with a CPC index for the dynamic
153 // not just one per distinct CP entry. In other words, the 168 // call site. Unlike other CPC entries, there is one per bytecode,
154 // CPC-to-CP relation is many-to-one for invokedynamic entries. 169 // not just one per distinct CP entry. In other words, the
155 // This means we must use a larger index size than u2 to address 170 // CPC-to-CP relation is many-to-one for invokedynamic entries.
156 // all these entries. That is the main reason invokedynamic 171 // This means we must use a larger index size than u2 to address
157 // must have a five-byte instruction format. (Of course, other JVM 172 // all these entries. That is the main reason invokedynamic
158 // implementations can use the bytes for other purposes.) 173 // must have a five-byte instruction format. (Of course, other JVM
159 Bytes::put_native_u4(p, constantPoolCacheOopDesc::encode_secondary_index(cpc2)); 174 // implementations can use the bytes for other purposes.)
160 // Note: We use native_u4 format exclusively for 4-byte indexes. 175 Bytes::put_native_u4(p, constantPoolCacheOopDesc::encode_secondary_index(cpc2));
176 // Note: We use native_u4 format exclusively for 4-byte indexes.
177 } else {
178 int cache_index = constantPoolCacheOopDesc::decode_secondary_index(
179 Bytes::get_native_u4(p));
180 int secondary_index = cp_cache_secondary_entry_main_index(cache_index);
181 int pool_index = cp_cache_entry_pool_index(secondary_index);
182 assert(_pool->tag_at(pool_index).is_invoke_dynamic(), "wrong index");
183 // zero out 4 bytes
184 Bytes::put_Java_u4(p, 0);
185 Bytes::put_Java_u2(p, pool_index);
186 }
161 } 187 }
162 188
163 189
164 // Rewrite some ldc bytecodes to _fast_aldc 190 // Rewrite some ldc bytecodes to _fast_aldc
165 void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide) { 191 void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide,
166 assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), ""); 192 bool reverse) {
167 address p = bcp + offset; 193 if (!reverse) {
168 int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p); 194 assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
169 constantTag tag = _pool->tag_at(cp_index).value(); 195 address p = bcp + offset;
170 if (tag.is_method_handle() || tag.is_method_type()) { 196 int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
171 int cache_index = cp_entry_to_cp_cache(cp_index); 197 constantTag tag = _pool->tag_at(cp_index).value();
172 if (is_wide) { 198 if (tag.is_method_handle() || tag.is_method_type()) {
173 (*bcp) = Bytecodes::_fast_aldc_w; 199 int cache_index = cp_entry_to_cp_cache(cp_index);
174 assert(cache_index == (u2)cache_index, ""); 200 if (is_wide) {
175 Bytes::put_native_u2(p, cache_index); 201 (*bcp) = Bytecodes::_fast_aldc_w;
176 } else { 202 assert(cache_index == (u2)cache_index, "index overflow");
177 (*bcp) = Bytecodes::_fast_aldc; 203 Bytes::put_native_u2(p, cache_index);
178 assert(cache_index == (u1)cache_index, ""); 204 } else {
179 (*p) = (u1)cache_index; 205 (*bcp) = Bytecodes::_fast_aldc;
206 assert(cache_index == (u1)cache_index, "index overflow");
207 (*p) = (u1)cache_index;
208 }
209 }
210 } else {
211 Bytecodes::Code rewritten_bc =
212 (is_wide ? Bytecodes::_fast_aldc_w : Bytecodes::_fast_aldc);
213 if ((*bcp) == rewritten_bc) {
214 address p = bcp + offset;
215 int cache_index = is_wide ? Bytes::get_native_u2(p) : (u1)(*p);
216 int pool_index = cp_cache_entry_pool_index(cache_index);
217 if (is_wide) {
218 (*bcp) = Bytecodes::_ldc_w;
219 assert(pool_index == (u2)pool_index, "index overflow");
220 Bytes::put_Java_u2(p, pool_index);
221 } else {
222 (*bcp) = Bytecodes::_ldc;
223 assert(pool_index == (u1)pool_index, "index overflow");
224 (*p) = (u1)pool_index;
225 }
180 } 226 }
181 } 227 }
182 } 228 }
183 229
184 230
185 // Rewrites a method given the index_map information 231 // Rewrites a method given the index_map information
186 void Rewriter::scan_method(methodOop method) { 232 void Rewriter::scan_method(methodOop method, bool reverse) {
187 233
188 int nof_jsrs = 0; 234 int nof_jsrs = 0;
189 bool has_monitor_bytecodes = false; 235 bool has_monitor_bytecodes = false;
190 236
191 { 237 {
236 ); 282 );
237 } 283 }
238 #endif 284 #endif
239 break; 285 break;
240 } 286 }
287 case Bytecodes::_fast_linearswitch:
288 case Bytecodes::_fast_binaryswitch: {
289 #ifndef CC_INTERP
290 (*bcp) = Bytecodes::_lookupswitch;
291 #endif
292 break;
293 }
241 case Bytecodes::_getstatic : // fall through 294 case Bytecodes::_getstatic : // fall through
242 case Bytecodes::_putstatic : // fall through 295 case Bytecodes::_putstatic : // fall through
243 case Bytecodes::_getfield : // fall through 296 case Bytecodes::_getfield : // fall through
244 case Bytecodes::_putfield : // fall through 297 case Bytecodes::_putfield : // fall through
245 case Bytecodes::_invokevirtual : // fall through 298 case Bytecodes::_invokevirtual : // fall through
246 case Bytecodes::_invokespecial : // fall through 299 case Bytecodes::_invokespecial : // fall through
247 case Bytecodes::_invokestatic : 300 case Bytecodes::_invokestatic :
248 case Bytecodes::_invokeinterface: 301 case Bytecodes::_invokeinterface:
249 rewrite_member_reference(bcp, prefix_length+1); 302 rewrite_member_reference(bcp, prefix_length+1, reverse);
250 break; 303 break;
251 case Bytecodes::_invokedynamic: 304 case Bytecodes::_invokedynamic:
252 rewrite_invokedynamic(bcp, prefix_length+1); 305 rewrite_invokedynamic(bcp, prefix_length+1, reverse);
253 break; 306 break;
254 case Bytecodes::_ldc: 307 case Bytecodes::_ldc:
255 maybe_rewrite_ldc(bcp, prefix_length+1, false); 308 case Bytecodes::_fast_aldc:
309 maybe_rewrite_ldc(bcp, prefix_length+1, false, reverse);
256 break; 310 break;
257 case Bytecodes::_ldc_w: 311 case Bytecodes::_ldc_w:
258 maybe_rewrite_ldc(bcp, prefix_length+1, true); 312 case Bytecodes::_fast_aldc_w:
313 maybe_rewrite_ldc(bcp, prefix_length+1, true, reverse);
259 break; 314 break;
260 case Bytecodes::_jsr : // fall through 315 case Bytecodes::_jsr : // fall through
261 case Bytecodes::_jsr_w : nof_jsrs++; break; 316 case Bytecodes::_jsr_w : nof_jsrs++; break;
262 case Bytecodes::_monitorenter : // fall through 317 case Bytecodes::_monitorenter : // fall through
263 case Bytecodes::_monitorexit : has_monitor_bytecodes = true; break; 318 case Bytecodes::_monitorexit : has_monitor_bytecodes = true; break;
273 // The presence of a jsr bytecode implies that the method might potentially 328 // The presence of a jsr bytecode implies that the method might potentially
274 // have to be rewritten, so we run the oopMapGenerator on the method 329 // have to be rewritten, so we run the oopMapGenerator on the method
275 if (nof_jsrs > 0) { 330 if (nof_jsrs > 0) {
276 method->set_has_jsrs(); 331 method->set_has_jsrs();
277 // Second pass will revisit this method. 332 // Second pass will revisit this method.
278 assert(method->has_jsrs(), ""); 333 assert(method->has_jsrs(), "didn't we just set this?");
279 } 334 }
280 } 335 }
281 336
282 // After constant pool is created, revisit methods containing jsrs. 337 // After constant pool is created, revisit methods containing jsrs.
283 methodHandle Rewriter::rewrite_jsrs(methodHandle method, TRAPS) { 338 methodHandle Rewriter::rewrite_jsrs(methodHandle method, TRAPS) {
339 ResourceMark rm(THREAD);
284 ResolveOopMapConflicts romc(method); 340 ResolveOopMapConflicts romc(method);
285 methodHandle original_method = method; 341 methodHandle original_method = method;
286 method = romc.do_potential_rewrite(CHECK_(methodHandle())); 342 method = romc.do_potential_rewrite(CHECK_(methodHandle()));
287 if (method() != original_method()) { 343 if (method() != original_method()) {
288 // Insert invalid bytecode into original methodOop and set 344 // Insert invalid bytecode into original methodOop and set
299 method->set_guaranteed_monitor_matching(); 355 method->set_guaranteed_monitor_matching();
300 } 356 }
301 357
302 return method; 358 return method;
303 } 359 }
304
305 360
306 void Rewriter::rewrite(instanceKlassHandle klass, TRAPS) { 361 void Rewriter::rewrite(instanceKlassHandle klass, TRAPS) {
307 ResourceMark rm(THREAD); 362 ResourceMark rm(THREAD);
308 Rewriter rw(klass, klass->constants(), klass->methods(), CHECK); 363 Rewriter rw(klass, klass->constants(), klass->methods(), CHECK);
309 // (That's all, folks.) 364 // (That's all, folks.)
343 } 398 }
344 assert(did_rewrite, "must find Object::<init> to rewrite it"); 399 assert(did_rewrite, "must find Object::<init> to rewrite it");
345 } 400 }
346 401
347 // rewrite methods, in two passes 402 // rewrite methods, in two passes
348 int i, len = _methods->length(); 403 int len = _methods->length();
349 404
350 for (i = len; --i >= 0; ) { 405 for (int i = len-1; i >= 0; i--) {
351 methodOop method = (methodOop)_methods->obj_at(i); 406 methodOop method = (methodOop)_methods->obj_at(i);
352 scan_method(method); 407 scan_method(method);
353 } 408 }
354 409
355 // allocate constant pool cache, now that we've seen all the bytecodes 410 // allocate constant pool cache, now that we've seen all the bytecodes
356 make_constant_pool_cache(CHECK); 411 make_constant_pool_cache(THREAD);
357 412
358 for (i = len; --i >= 0; ) { 413 // Restore bytecodes to their unrewritten state if there are exceptions
359 methodHandle m(THREAD, (methodOop)_methods->obj_at(i)); 414 // rewriting bytecodes or allocating the cpCache
415 if (HAS_PENDING_EXCEPTION) {
416 restore_bytecodes();
417 return;
418 }
419 }
420
421 // Relocate jsr/rets in a method. This can't be done with the rewriter
422 // stage because it can throw other exceptions, leaving the bytecodes
423 // pointing at constant pool cache entries.
424 // Link and check jvmti dependencies while we're iterating over the methods.
425 // JSR292 code calls with a different set of methods, so two entry points.
426 void Rewriter::relocate_and_link(instanceKlassHandle this_oop, TRAPS) {
427 objArrayHandle methods(THREAD, this_oop->methods());
428 relocate_and_link(this_oop, methods, THREAD);
429 }
430
431 void Rewriter::relocate_and_link(instanceKlassHandle this_oop,
432 objArrayHandle methods, TRAPS) {
433 int len = methods->length();
434 for (int i = len-1; i >= 0; i--) {
435 methodHandle m(THREAD, (methodOop)methods->obj_at(i));
360 436
361 if (m->has_jsrs()) { 437 if (m->has_jsrs()) {
362 m = rewrite_jsrs(m, CHECK); 438 m = rewrite_jsrs(m, CHECK);
363 // Method might have gotten rewritten. 439 // Method might have gotten rewritten.
364 _methods->obj_at_put(i, m()); 440 methods->obj_at_put(i, m());
365 } 441 }
366 442
367 // Set up method entry points for compiler and interpreter. 443 // Set up method entry points for compiler and interpreter.
368 m->link_method(m, CHECK); 444 m->link_method(m, CHECK);
369 445
446 // This is for JVMTI and unrelated to relocator but the last thing we do
370 #ifdef ASSERT 447 #ifdef ASSERT
371 if (StressMethodComparator) { 448 if (StressMethodComparator) {
372 static int nmc = 0; 449 static int nmc = 0;
373 for (int j = i; j >= 0 && j >= i-4; j--) { 450 for (int j = i; j >= 0 && j >= i-4; j--) {
374 if ((++nmc % 1000) == 0) tty->print_cr("Have run MethodComparator %d times...", nmc); 451 if ((++nmc % 1000) == 0) tty->print_cr("Have run MethodComparator %d times...", nmc);
375 bool z = MethodComparator::methods_EMCP(m(), (methodOop)_methods->obj_at(j)); 452 bool z = MethodComparator::methods_EMCP(m(),
453 (methodOop)methods->obj_at(j));
376 if (j == i && !z) { 454 if (j == i && !z) {
377 tty->print("MethodComparator FAIL: "); m->print(); m->print_codes(); 455 tty->print("MethodComparator FAIL: "); m->print(); m->print_codes();
378 assert(z, "method must compare equal to itself"); 456 assert(z, "method must compare equal to itself");
379 } 457 }
380 } 458 }