Mercurial > hg > graal-compiler
comparison src/share/vm/prims/jvmtiRedefineClasses.cpp @ 7963:9bf5f643d1cf
8006542: JSR 292: the VM_RedefineClasses::append_entry() must support invokedynamic entry kinds
Summary: Need a support for invokedynamic entry kinds when new and old constant pools are merged.
Reviewed-by: coleenp, twisti
Contributed-by: serguei.spitsyn@oracle.com
author | sspitsyn |
---|---|
date | Thu, 31 Jan 2013 20:07:18 -0800 |
parents | 8d1fb417a42d |
children | 79c1bb8fce5d |
comparison
equal
deleted
inserted
replaced
7959:7885e162c30f | 7963:9bf5f643d1cf |
---|---|
276 | 276 |
277 // this is an indirect CP entry so it needs special handling | 277 // this is an indirect CP entry so it needs special handling |
278 case JVM_CONSTANT_NameAndType: | 278 case JVM_CONSTANT_NameAndType: |
279 { | 279 { |
280 int name_ref_i = scratch_cp->name_ref_index_at(scratch_i); | 280 int name_ref_i = scratch_cp->name_ref_index_at(scratch_i); |
281 int new_name_ref_i = 0; | 281 int new_name_ref_i = find_or_append_indirect_entry(scratch_cp, name_ref_i, merge_cp_p, |
282 bool match = (name_ref_i < *merge_cp_length_p) && | 282 merge_cp_length_p, THREAD); |
283 scratch_cp->compare_entry_to(name_ref_i, *merge_cp_p, name_ref_i, | |
284 THREAD); | |
285 if (!match) { | |
286 // forward reference in *merge_cp_p or not a direct match | |
287 | |
288 int found_i = scratch_cp->find_matching_entry(name_ref_i, *merge_cp_p, | |
289 THREAD); | |
290 if (found_i != 0) { | |
291 guarantee(found_i != name_ref_i, | |
292 "compare_entry_to() and find_matching_entry() do not agree"); | |
293 | |
294 // Found a matching entry somewhere else in *merge_cp_p so | |
295 // just need a mapping entry. | |
296 new_name_ref_i = found_i; | |
297 map_index(scratch_cp, name_ref_i, found_i); | |
298 } else { | |
299 // no match found so we have to append this entry to *merge_cp_p | |
300 append_entry(scratch_cp, name_ref_i, merge_cp_p, merge_cp_length_p, | |
301 THREAD); | |
302 // The above call to append_entry() can only append one entry | |
303 // so the post call query of *merge_cp_length_p is only for | |
304 // the sake of consistency. | |
305 new_name_ref_i = *merge_cp_length_p - 1; | |
306 } | |
307 } | |
308 | 283 |
309 int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i); | 284 int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i); |
310 int new_signature_ref_i = 0; | 285 int new_signature_ref_i = find_or_append_indirect_entry(scratch_cp, signature_ref_i, |
311 match = (signature_ref_i < *merge_cp_length_p) && | 286 merge_cp_p, merge_cp_length_p, |
312 scratch_cp->compare_entry_to(signature_ref_i, *merge_cp_p, | 287 THREAD); |
313 signature_ref_i, THREAD); | |
314 if (!match) { | |
315 // forward reference in *merge_cp_p or not a direct match | |
316 | |
317 int found_i = scratch_cp->find_matching_entry(signature_ref_i, | |
318 *merge_cp_p, THREAD); | |
319 if (found_i != 0) { | |
320 guarantee(found_i != signature_ref_i, | |
321 "compare_entry_to() and find_matching_entry() do not agree"); | |
322 | |
323 // Found a matching entry somewhere else in *merge_cp_p so | |
324 // just need a mapping entry. | |
325 new_signature_ref_i = found_i; | |
326 map_index(scratch_cp, signature_ref_i, found_i); | |
327 } else { | |
328 // no match found so we have to append this entry to *merge_cp_p | |
329 append_entry(scratch_cp, signature_ref_i, merge_cp_p, | |
330 merge_cp_length_p, THREAD); | |
331 // The above call to append_entry() can only append one entry | |
332 // so the post call query of *merge_cp_length_p is only for | |
333 // the sake of consistency. | |
334 new_signature_ref_i = *merge_cp_length_p - 1; | |
335 } | |
336 } | |
337 | 288 |
338 // If the referenced entries already exist in *merge_cp_p, then | 289 // If the referenced entries already exist in *merge_cp_p, then |
339 // both new_name_ref_i and new_signature_ref_i will both be 0. | 290 // new_name_ref_i and new_signature_ref_i will be their old values. |
340 // In that case, all we are appending is the current entry. | 291 // In that case, all we are appending is the current entry. |
341 if (new_name_ref_i == 0) { | 292 if (new_name_ref_i != name_ref_i) { |
342 new_name_ref_i = name_ref_i; | |
343 } else { | |
344 RC_TRACE(0x00080000, | 293 RC_TRACE(0x00080000, |
345 ("NameAndType entry@%d name_ref_index change: %d to %d", | 294 ("NameAndType entry@%d name_ref_index change: %d to %d", |
346 *merge_cp_length_p, name_ref_i, new_name_ref_i)); | 295 *merge_cp_length_p, name_ref_i, new_name_ref_i)); |
347 } | 296 } |
348 if (new_signature_ref_i == 0) { | 297 if (new_signature_ref_i != signature_ref_i) { |
349 new_signature_ref_i = signature_ref_i; | |
350 } else { | |
351 RC_TRACE(0x00080000, | 298 RC_TRACE(0x00080000, |
352 ("NameAndType entry@%d signature_ref_index change: %d to %d", | 299 ("NameAndType entry@%d signature_ref_index change: %d to %d", |
353 *merge_cp_length_p, signature_ref_i, new_signature_ref_i)); | 300 *merge_cp_length_p, signature_ref_i, new_signature_ref_i)); |
354 } | 301 } |
355 | 302 |
367 case JVM_CONSTANT_Fieldref: // fall through | 314 case JVM_CONSTANT_Fieldref: // fall through |
368 case JVM_CONSTANT_InterfaceMethodref: // fall through | 315 case JVM_CONSTANT_InterfaceMethodref: // fall through |
369 case JVM_CONSTANT_Methodref: | 316 case JVM_CONSTANT_Methodref: |
370 { | 317 { |
371 int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i); | 318 int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i); |
372 int new_klass_ref_i = 0; | 319 int new_klass_ref_i = find_or_append_indirect_entry(scratch_cp, klass_ref_i, |
373 bool match = (klass_ref_i < *merge_cp_length_p) && | 320 merge_cp_p, merge_cp_length_p, THREAD); |
374 scratch_cp->compare_entry_to(klass_ref_i, *merge_cp_p, klass_ref_i, | 321 |
375 THREAD); | 322 int name_and_type_ref_i = scratch_cp->uncached_name_and_type_ref_index_at(scratch_i); |
376 if (!match) { | 323 int new_name_and_type_ref_i = find_or_append_indirect_entry(scratch_cp, name_and_type_ref_i, |
377 // forward reference in *merge_cp_p or not a direct match | 324 merge_cp_p, merge_cp_length_p, THREAD); |
378 | |
379 int found_i = scratch_cp->find_matching_entry(klass_ref_i, *merge_cp_p, | |
380 THREAD); | |
381 if (found_i != 0) { | |
382 guarantee(found_i != klass_ref_i, | |
383 "compare_entry_to() and find_matching_entry() do not agree"); | |
384 | |
385 // Found a matching entry somewhere else in *merge_cp_p so | |
386 // just need a mapping entry. | |
387 new_klass_ref_i = found_i; | |
388 map_index(scratch_cp, klass_ref_i, found_i); | |
389 } else { | |
390 // no match found so we have to append this entry to *merge_cp_p | |
391 append_entry(scratch_cp, klass_ref_i, merge_cp_p, merge_cp_length_p, | |
392 THREAD); | |
393 // The above call to append_entry() can only append one entry | |
394 // so the post call query of *merge_cp_length_p is only for | |
395 // the sake of consistency. Without the optimization where we | |
396 // use JVM_CONSTANT_UnresolvedClass, then up to two entries | |
397 // could be appended. | |
398 new_klass_ref_i = *merge_cp_length_p - 1; | |
399 } | |
400 } | |
401 | |
402 int name_and_type_ref_i = | |
403 scratch_cp->uncached_name_and_type_ref_index_at(scratch_i); | |
404 int new_name_and_type_ref_i = 0; | |
405 match = (name_and_type_ref_i < *merge_cp_length_p) && | |
406 scratch_cp->compare_entry_to(name_and_type_ref_i, *merge_cp_p, | |
407 name_and_type_ref_i, THREAD); | |
408 if (!match) { | |
409 // forward reference in *merge_cp_p or not a direct match | |
410 | |
411 int found_i = scratch_cp->find_matching_entry(name_and_type_ref_i, | |
412 *merge_cp_p, THREAD); | |
413 if (found_i != 0) { | |
414 guarantee(found_i != name_and_type_ref_i, | |
415 "compare_entry_to() and find_matching_entry() do not agree"); | |
416 | |
417 // Found a matching entry somewhere else in *merge_cp_p so | |
418 // just need a mapping entry. | |
419 new_name_and_type_ref_i = found_i; | |
420 map_index(scratch_cp, name_and_type_ref_i, found_i); | |
421 } else { | |
422 // no match found so we have to append this entry to *merge_cp_p | |
423 append_entry(scratch_cp, name_and_type_ref_i, merge_cp_p, | |
424 merge_cp_length_p, THREAD); | |
425 // The above call to append_entry() can append more than | |
426 // one entry so the post call query of *merge_cp_length_p | |
427 // is required in order to get the right index for the | |
428 // JVM_CONSTANT_NameAndType entry. | |
429 new_name_and_type_ref_i = *merge_cp_length_p - 1; | |
430 } | |
431 } | |
432 | |
433 // If the referenced entries already exist in *merge_cp_p, then | |
434 // both new_klass_ref_i and new_name_and_type_ref_i will both be | |
435 // 0. In that case, all we are appending is the current entry. | |
436 if (new_klass_ref_i == 0) { | |
437 new_klass_ref_i = klass_ref_i; | |
438 } | |
439 if (new_name_and_type_ref_i == 0) { | |
440 new_name_and_type_ref_i = name_and_type_ref_i; | |
441 } | |
442 | 325 |
443 const char *entry_name; | 326 const char *entry_name; |
444 switch (scratch_cp->tag_at(scratch_i).value()) { | 327 switch (scratch_cp->tag_at(scratch_i).value()) { |
445 case JVM_CONSTANT_Fieldref: | 328 case JVM_CONSTANT_Fieldref: |
446 entry_name = "Fieldref"; | 329 entry_name = "Fieldref"; |
479 map_index(scratch_cp, scratch_i, *merge_cp_length_p); | 362 map_index(scratch_cp, scratch_i, *merge_cp_length_p); |
480 } | 363 } |
481 (*merge_cp_length_p)++; | 364 (*merge_cp_length_p)++; |
482 } break; | 365 } break; |
483 | 366 |
367 // this is an indirect CP entry so it needs special handling | |
368 case JVM_CONSTANT_MethodType: | |
369 { | |
370 int ref_i = scratch_cp->method_type_index_at(scratch_i); | |
371 int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p, | |
372 merge_cp_length_p, THREAD); | |
373 if (new_ref_i != ref_i) { | |
374 RC_TRACE(0x00080000, | |
375 ("MethodType entry@%d ref_index change: %d to %d", | |
376 *merge_cp_length_p, ref_i, new_ref_i)); | |
377 } | |
378 (*merge_cp_p)->method_type_index_at_put(*merge_cp_length_p, new_ref_i); | |
379 if (scratch_i != *merge_cp_length_p) { | |
380 // The new entry in *merge_cp_p is at a different index than | |
381 // the new entry in scratch_cp so we need to map the index values. | |
382 map_index(scratch_cp, scratch_i, *merge_cp_length_p); | |
383 } | |
384 (*merge_cp_length_p)++; | |
385 } break; | |
386 | |
387 // this is an indirect CP entry so it needs special handling | |
388 case JVM_CONSTANT_MethodHandle: | |
389 { | |
390 int ref_kind = scratch_cp->method_handle_ref_kind_at(scratch_i); | |
391 int ref_i = scratch_cp->method_handle_index_at(scratch_i); | |
392 int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p, | |
393 merge_cp_length_p, THREAD); | |
394 if (new_ref_i != ref_i) { | |
395 RC_TRACE(0x00080000, | |
396 ("MethodHandle entry@%d ref_index change: %d to %d", | |
397 *merge_cp_length_p, ref_i, new_ref_i)); | |
398 } | |
399 (*merge_cp_p)->method_handle_index_at_put(*merge_cp_length_p, ref_kind, new_ref_i); | |
400 if (scratch_i != *merge_cp_length_p) { | |
401 // The new entry in *merge_cp_p is at a different index than | |
402 // the new entry in scratch_cp so we need to map the index values. | |
403 map_index(scratch_cp, scratch_i, *merge_cp_length_p); | |
404 } | |
405 (*merge_cp_length_p)++; | |
406 } break; | |
407 | |
408 // this is an indirect CP entry so it needs special handling | |
409 case JVM_CONSTANT_InvokeDynamic: | |
410 { | |
411 // TBD: cross-checks and possible extra appends into CP and bsm operands | |
412 // are needed as well. This issue is tracked by a separate bug 8007037. | |
413 int bss_idx = scratch_cp->invoke_dynamic_bootstrap_specifier_index(scratch_i); | |
414 | |
415 int ref_i = scratch_cp->invoke_dynamic_name_and_type_ref_index_at(scratch_i); | |
416 int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p, | |
417 merge_cp_length_p, THREAD); | |
418 if (new_ref_i != ref_i) { | |
419 RC_TRACE(0x00080000, | |
420 ("InvokeDynamic entry@%d name_and_type ref_index change: %d to %d", | |
421 *merge_cp_length_p, ref_i, new_ref_i)); | |
422 } | |
423 | |
424 (*merge_cp_p)->invoke_dynamic_at_put(*merge_cp_length_p, bss_idx, new_ref_i); | |
425 if (scratch_i != *merge_cp_length_p) { | |
426 // The new entry in *merge_cp_p is at a different index than | |
427 // the new entry in scratch_cp so we need to map the index values. | |
428 map_index(scratch_cp, scratch_i, *merge_cp_length_p); | |
429 } | |
430 (*merge_cp_length_p)++; | |
431 } break; | |
432 | |
484 // At this stage, Class or UnresolvedClass could be here, but not | 433 // At this stage, Class or UnresolvedClass could be here, but not |
485 // ClassIndex | 434 // ClassIndex |
486 case JVM_CONSTANT_ClassIndex: // fall through | 435 case JVM_CONSTANT_ClassIndex: // fall through |
487 | 436 |
488 // Invalid is used as the tag for the second constant pool entry | 437 // Invalid is used as the tag for the second constant pool entry |
503 jbyte bad_value = scratch_cp->tag_at(scratch_i).value(); | 452 jbyte bad_value = scratch_cp->tag_at(scratch_i).value(); |
504 ShouldNotReachHere(); | 453 ShouldNotReachHere(); |
505 } break; | 454 } break; |
506 } // end switch tag value | 455 } // end switch tag value |
507 } // end append_entry() | 456 } // end append_entry() |
457 | |
458 | |
459 int VM_RedefineClasses::find_or_append_indirect_entry(constantPoolHandle scratch_cp, | |
460 int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) { | |
461 | |
462 int new_ref_i = ref_i; | |
463 bool match = (ref_i < *merge_cp_length_p) && | |
464 scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i, THREAD); | |
465 | |
466 if (!match) { | |
467 // forward reference in *merge_cp_p or not a direct match | |
468 int found_i = scratch_cp->find_matching_entry(ref_i, *merge_cp_p, THREAD); | |
469 if (found_i != 0) { | |
470 guarantee(found_i != ref_i, "compare_entry_to() and find_matching_entry() do not agree"); | |
471 // Found a matching entry somewhere else in *merge_cp_p so just need a mapping entry. | |
472 new_ref_i = found_i; | |
473 map_index(scratch_cp, ref_i, found_i); | |
474 } else { | |
475 // no match found so we have to append this entry to *merge_cp_p | |
476 append_entry(scratch_cp, ref_i, merge_cp_p, merge_cp_length_p, THREAD); | |
477 // The above call to append_entry() can only append one entry | |
478 // so the post call query of *merge_cp_length_p is only for | |
479 // the sake of consistency. | |
480 new_ref_i = *merge_cp_length_p - 1; | |
481 } | |
482 } | |
483 | |
484 return new_ref_i; | |
485 } // end find_or_append_indirect_entry() | |
508 | 486 |
509 | 487 |
510 void VM_RedefineClasses::swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class, TRAPS) { | 488 void VM_RedefineClasses::swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class, TRAPS) { |
511 AnnotationArray* save; | 489 AnnotationArray* save; |
512 | 490 |