comparison src/share/vm/code/nmethod.cpp @ 20804:7848fc12602b

Merge with jdk8u40-b25
author Gilles Duboscq <gilles.m.duboscq@oracle.com>
date Tue, 07 Apr 2015 14:58:49 +0200
parents f8147c931ce4 b12a2a9b05ca
children eb21f2944d7d
comparing 20184:84105dcdb05b with 20804:7848fc12602b
35 #include "compiler/disassembler.hpp" 35 #include "compiler/disassembler.hpp"
36 #include "interpreter/bytecode.hpp" 36 #include "interpreter/bytecode.hpp"
37 #include "oops/methodData.hpp" 37 #include "oops/methodData.hpp"
38 #include "prims/jvmtiRedefineClassesTrace.hpp" 38 #include "prims/jvmtiRedefineClassesTrace.hpp"
39 #include "prims/jvmtiImpl.hpp" 39 #include "prims/jvmtiImpl.hpp"
40 #include "runtime/orderAccess.inline.hpp"
40 #include "runtime/sharedRuntime.hpp" 41 #include "runtime/sharedRuntime.hpp"
41 #include "runtime/sweeper.hpp" 42 #include "runtime/sweeper.hpp"
42 #include "utilities/dtrace.hpp" 43 #include "utilities/dtrace.hpp"
43 #include "utilities/events.hpp" 44 #include "utilities/events.hpp"
44 #include "utilities/xmlstream.hpp" 45 #include "utilities/xmlstream.hpp"
48 #ifdef GRAAL 49 #ifdef GRAAL
49 #include "graal/graalJavaAccess.hpp" 50 #include "graal/graalJavaAccess.hpp"
50 #endif 51 #endif
51 52
52 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC 53 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
54
55 unsigned char nmethod::_global_unloading_clock = 0;
53 56
54 #ifdef DTRACE_ENABLED 57 #ifdef DTRACE_ENABLED
55 58
56 // Only bother with this argument setup if dtrace is available 59 // Only bother with this argument setup if dtrace is available
57 60
443 new_entry->set_next(exception_cache()); 446 new_entry->set_next(exception_cache());
444 } 447 }
445 set_exception_cache(new_entry); 448 set_exception_cache(new_entry);
446 } 449 }
447 450
448 void nmethod::remove_from_exception_cache(ExceptionCache* ec) { 451 void nmethod::clean_exception_cache(BoolObjectClosure* is_alive) {
449 ExceptionCache* prev = NULL; 452 ExceptionCache* prev = NULL;
450 ExceptionCache* curr = exception_cache(); 453 ExceptionCache* curr = exception_cache();
451 assert(curr != NULL, "nothing to remove"); 454
452 // find the previous and next entry of ec 455 while (curr != NULL) {
453 while (curr != ec) { 456 ExceptionCache* next = curr->next();
454 prev = curr; 457
455 curr = curr->next(); 458 Klass* ex_klass = curr->exception_type();
456 assert(curr != NULL, "ExceptionCache not found"); 459 if (ex_klass != NULL && !ex_klass->is_loader_alive(is_alive)) {
457 } 460 if (prev == NULL) {
458 // now: curr == ec 461 set_exception_cache(next);
459 ExceptionCache* next = curr->next(); 462 } else {
460 if (prev == NULL) { 463 prev->set_next(next);
461 set_exception_cache(next); 464 }
462 } else { 465 delete curr;
463 prev->set_next(next); 466 // prev stays the same.
464 } 467 } else {
465 delete curr; 468 prev = curr;
466 } 469 }
467 470
471 curr = next;
472 }
473 }
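
Note: the new clean_exception_cache() above replaces the old single-entry remove_from_exception_cache(); instead of unlinking one caller-supplied entry, it sweeps the whole list and drops every entry whose exception Klass has a dead class loader. A minimal, self-contained sketch of the same prune-in-place pattern over a singly linked list (hypothetical Node type, not HotSpot's ExceptionCache):

    #include <cstddef>

    struct Node {
      Node* next;
      bool  dead;
    };

    static Node* prune_dead(Node* head) {
      Node* prev = NULL;
      Node* curr = head;
      while (curr != NULL) {
        Node* next = curr->next;
        if (curr->dead) {
          if (prev == NULL) { head = next; } else { prev->next = next; }
          delete curr;          // unlink first, then free; prev stays where it is
        } else {
          prev = curr;          // keep the entry, advance the trailing pointer
        }
        curr = next;
      }
      return head;
    }
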
468 474
469 // public method for accessing the exception cache 475 // public method for accessing the exception cache
470 // These are the public access methods. 476 // These are the public access methods.
471 address nmethod::handler_for_exception_and_pc(Handle exception, address pc) { 477 address nmethod::handler_for_exception_and_pc(Handle exception, address pc) {
472 // We never grab a lock to read the exception cache, so we may 478 // We never grab a lock to read the exception cache, so we may
522 } 528 }
523 529
524 // Fill in default values for various flag fields 530 // Fill in default values for various flag fields
525 void nmethod::init_defaults() { 531 void nmethod::init_defaults() {
526 _state = in_use; 532 _state = in_use;
533 _unloading_clock = 0;
527 _marked_for_reclamation = 0; 534 _marked_for_reclamation = 0;
528 _has_flushed_dependencies = 0; 535 _has_flushed_dependencies = 0;
529 _has_unsafe_access = 0; 536 _has_unsafe_access = 0;
530 _has_method_handle_invokes = 0; 537 _has_method_handle_invokes = 0;
531 _lazy_critical_native = 0; 538 _lazy_critical_native = 0;
540 #endif 547 #endif
541 548
542 _oops_do_mark_link = NULL; 549 _oops_do_mark_link = NULL;
543 _jmethod_id = NULL; 550 _jmethod_id = NULL;
544 _osr_link = NULL; 551 _osr_link = NULL;
545 _scavenge_root_link = NULL; 552 if (UseG1GC) {
553 _unloading_next = NULL;
554 } else {
555 _scavenge_root_link = NULL;
556 }
546 _scavenge_root_state = 0; 557 _scavenge_root_state = 0;
547 _compiler = NULL; 558 _compiler = NULL;
548 #if INCLUDE_RTM_OPT 559 #if INCLUDE_RTM_OPT
549 _rtm_state = NoRTM; 560 _rtm_state = NoRTM;
550 #endif 561 #endif
761 _exception_cache = NULL; 772 _exception_cache = NULL;
762 _pc_desc_cache.reset_to(NULL); 773 _pc_desc_cache.reset_to(NULL);
763 _hotness_counter = NMethodSweeper::hotness_counter_reset_val(); 774 _hotness_counter = NMethodSweeper::hotness_counter_reset_val();
764 775
765 code_buffer->copy_values_to(this); 776 code_buffer->copy_values_to(this);
766 if (ScavengeRootsInCode && detect_scavenge_root_oops()) { 777 if (ScavengeRootsInCode) {
767 CodeCache::add_scavenge_root_nmethod(this); 778 if (detect_scavenge_root_oops()) {
779 CodeCache::add_scavenge_root_nmethod(this);
780 }
768 Universe::heap()->register_nmethod(this); 781 Universe::heap()->register_nmethod(this);
769 } 782 }
770 debug_only(verify_scavenge_root_oops()); 783 debug_only(verify_scavenge_root_oops());
771 CodeCache::commit(this); 784 CodeCache::commit(this);
772 } 785 }
846 _exception_cache = NULL; 859 _exception_cache = NULL;
847 _pc_desc_cache.reset_to(NULL); 860 _pc_desc_cache.reset_to(NULL);
848 _hotness_counter = NMethodSweeper::hotness_counter_reset_val(); 861 _hotness_counter = NMethodSweeper::hotness_counter_reset_val();
849 862
850 code_buffer->copy_values_to(this); 863 code_buffer->copy_values_to(this);
851 if (ScavengeRootsInCode && detect_scavenge_root_oops()) { 864 if (ScavengeRootsInCode) {
852 CodeCache::add_scavenge_root_nmethod(this); 865 if (detect_scavenge_root_oops()) {
866 CodeCache::add_scavenge_root_nmethod(this);
867 }
853 Universe::heap()->register_nmethod(this); 868 Universe::heap()->register_nmethod(this);
854 } 869 }
855 DEBUG_ONLY(verify_scavenge_root_oops();) 870 DEBUG_ONLY(verify_scavenge_root_oops();)
856 CodeCache::commit(this); 871 CodeCache::commit(this);
857 } 872 }
994 1009
995 // Copy contents of ScopeDescRecorder to nmethod 1010 // Copy contents of ScopeDescRecorder to nmethod
996 code_buffer->copy_values_to(this); 1011 code_buffer->copy_values_to(this);
997 debug_info->copy_to(this); 1012 debug_info->copy_to(this);
998 dependencies->copy_to(this); 1013 dependencies->copy_to(this);
999 if (ScavengeRootsInCode && detect_scavenge_root_oops()) { 1014 if (ScavengeRootsInCode) {
1000 CodeCache::add_scavenge_root_nmethod(this); 1015 if (detect_scavenge_root_oops()) {
1016 CodeCache::add_scavenge_root_nmethod(this);
1017 }
1001 Universe::heap()->register_nmethod(this); 1018 Universe::heap()->register_nmethod(this);
1002 } 1019 }
1003 debug_only(verify_scavenge_root_oops()); 1020 debug_only(verify_scavenge_root_oops());
1004 1021
1005 CodeCache::commit(this); 1022 CodeCache::commit(this);
1262 RelocIterator iter(this, low_boundary); 1279 RelocIterator iter(this, low_boundary);
1263 while(iter.next()) { 1280 while(iter.next()) {
1264 switch(iter.type()) { 1281 switch(iter.type()) {
1265 case relocInfo::virtual_call_type: 1282 case relocInfo::virtual_call_type:
1266 case relocInfo::opt_virtual_call_type: { 1283 case relocInfo::opt_virtual_call_type: {
1267 CompiledIC *ic = CompiledIC_at(iter.reloc()); 1284 CompiledIC *ic = CompiledIC_at(&iter);
1268 // Ok, to lookup references to zombies here 1285 // Ok, to lookup references to zombies here
1269 CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination()); 1286 CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
1270 if( cb != NULL && cb->is_nmethod() ) { 1287 if( cb != NULL && cb->is_nmethod() ) {
1271 nmethod* nm = (nmethod*)cb; 1288 nmethod* nm = (nmethod*)cb;
1272 // Clean inline caches pointing to both zombie and not_entrant methods 1289 // Clean inline caches pointing to both zombie and not_entrant methods
1286 } 1303 }
1287 } 1304 }
1288 } 1305 }
1289 } 1306 }
1290 1307
1308 void nmethod::verify_clean_inline_caches() {
1309 assert_locked_or_safepoint(CompiledIC_lock);
1310
1311 // If the method is not entrant or zombie then a JMP is plastered over the
1312 // first few bytes. If an oop in the old code was there, that oop
1313 // should not get GC'd. Skip the first few bytes of oops on
1314 // not-entrant methods.
1315 address low_boundary = verified_entry_point();
1316 if (!is_in_use()) {
1317 low_boundary += NativeJump::instruction_size;
1318 // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
1319 // This means that the low_boundary is going to be a little too high.
1320 // This shouldn't matter, since oops of non-entrant methods are never used.
1321 // In fact, why are we bothering to look at oops in a non-entrant method??
1322 }
1323
1324 ResourceMark rm;
1325 RelocIterator iter(this, low_boundary);
1326 while(iter.next()) {
1327 switch(iter.type()) {
1328 case relocInfo::virtual_call_type:
1329 case relocInfo::opt_virtual_call_type: {
1330 CompiledIC *ic = CompiledIC_at(&iter);
1331 // Ok, to lookup references to zombies here
1332 CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
1333 if( cb != NULL && cb->is_nmethod() ) {
1334 nmethod* nm = (nmethod*)cb;
1335 // Verify that inline caches pointing to both zombie and not_entrant methods are clean
1336 if (!nm->is_in_use() || (nm->method()->code() != nm)) {
1337 assert(ic->is_clean(), "IC should be clean");
1338 }
1339 }
1340 break;
1341 }
1342 case relocInfo::static_call_type: {
1343 CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
1344 CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
1345 if( cb != NULL && cb->is_nmethod() ) {
1346 nmethod* nm = (nmethod*)cb;
1347 // Verify that inline caches pointing to both zombie and not_entrant methods are clean
1348 if (!nm->is_in_use() || (nm->method()->code() != nm)) {
1349 assert(csc->is_clean(), "IC should be clean");
1350 }
1351 }
1352 break;
1353 }
1354 }
1355 }
1356 }
1357
1358 int nmethod::verify_icholder_relocations() {
1359 int count = 0;
1360
1361 RelocIterator iter(this);
1362 while(iter.next()) {
1363 if (iter.type() == relocInfo::virtual_call_type) {
1364 if (CompiledIC::is_icholder_call_site(iter.virtual_call_reloc())) {
1365 CompiledIC *ic = CompiledIC_at(&iter);
1366 if (TraceCompiledIC) {
1367 tty->print("noticed icholder " INTPTR_FORMAT " ", p2i(ic->cached_icholder()));
1368 ic->print();
1369 }
1370 assert(ic->cached_icholder() != NULL, "must be non-NULL");
1371 count++;
1372 }
1373 }
1374 }
1375
1376 return count;
1377 }
1378
1291 // This is a private interface with the sweeper. 1379 // This is a private interface with the sweeper.
1292 void nmethod::mark_as_seen_on_stack() { 1380 void nmethod::mark_as_seen_on_stack() {
1293 assert(is_alive(), "Must be an alive method"); 1381 assert(is_alive(), "Must be an alive method");
1294 // Set the traversal mark to ensure that the sweeper does 2 1382 // Set the traversal mark to ensure that the sweeper does 2
1295 // cleaning passes before moving to zombie. 1383 // cleaning passes before moving to zombie.
1318 if (mdo == NULL) return; 1406 if (mdo == NULL) return;
1319 // There is a benign race here. See comments in methodData.hpp. 1407 // There is a benign race here. See comments in methodData.hpp.
1320 mdo->inc_decompile_count(); 1408 mdo->inc_decompile_count();
1321 } 1409 }
1322 1410
1411 void nmethod::increase_unloading_clock() {
1412 _global_unloading_clock++;
1413 if (_global_unloading_clock == 0) {
1414 // _nmethods are allocated with _unloading_clock == 0,
1415 // so 0 is never used as a clock value.
1416 _global_unloading_clock = 1;
1417 }
1418 }
1419
1420 void nmethod::set_unloading_clock(unsigned char unloading_clock) {
1421 OrderAccess::release_store((volatile jubyte*)&_unloading_clock, unloading_clock);
1422 }
1423
1424 unsigned char nmethod::unloading_clock() {
1425 return (unsigned char)OrderAccess::load_acquire((volatile jubyte*)&_unloading_clock);
1426 }
1427
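
Note: the unloading clock added here is a one-byte epoch counter. increase_unloading_clock() deliberately skips the value 0, because freshly allocated nmethods start with _unloading_clock == 0 and must never appear already processed; the per-nmethod value is written with a release store and read back with an acquire load. A rough equivalent of that pairing in standard C++, shown only to illustrate the memory-ordering intent (HotSpot uses OrderAccess, not <atomic>):

    #include <atomic>

    static std::atomic<unsigned char> clock_field(0);   // stand-in for _unloading_clock; 0 == never processed

    void publish_clock(unsigned char value) {
      // release: all cleaning performed before this store is visible to readers
      clock_field.store(value, std::memory_order_release);
    }

    unsigned char observe_clock() {
      // acquire: pairs with the release store above
      return clock_field.load(std::memory_order_acquire);
    }

    unsigned char next_epoch(unsigned char current) {
      unsigned char next = (unsigned char)(current + 1);
      return next == 0 ? 1 : next;                       // never hand out 0 again
    }
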
1323 void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) { 1428 void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) {
1324 1429
1325 post_compiled_method_unload(); 1430 post_compiled_method_unload();
1326 1431
1327 // Since this nmethod is being unloaded, make sure that dependencies 1432 // Since this nmethod is being unloaded, make sure that dependencies
1374 // Transitioning directly from live to unloaded -- so 1479 // Transitioning directly from live to unloaded -- so
1375 // we need to force a cache clean-up; remember this 1480 // we need to force a cache clean-up; remember this
1376 // for later on. 1481 // for later on.
1377 CodeCache::set_needs_cache_clean(true); 1482 CodeCache::set_needs_cache_clean(true);
1378 } 1483 }
1484
1485 // Unregister must be done before the state change
1486 Universe::heap()->unregister_nmethod(this);
1487
1379 _state = unloaded; 1488 _state = unloaded;
1380 1489
1381 // Log the unloading. 1490 // Log the unloading.
1382 log_state_change(); 1491 log_state_change();
1383 1492
1735 // attempt to report the event in the unlikely scenario where the 1844 // attempt to report the event in the unlikely scenario where the
1736 // event is enabled at the time the nmethod is made a zombie. 1845 // event is enabled at the time the nmethod is made a zombie.
1737 set_unload_reported(); 1846 set_unload_reported();
1738 } 1847 }
1739 1848
1849 void static clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive, bool mark_on_stack) {
1850 if (ic->is_icholder_call()) {
1851 // The only exception is compiledICHolder oops which may
1852 // yet be marked below. (We check this further below).
1853 CompiledICHolder* cichk_oop = ic->cached_icholder();
1854
1855 if (mark_on_stack) {
1856 Metadata::mark_on_stack(cichk_oop->holder_method());
1857 Metadata::mark_on_stack(cichk_oop->holder_klass());
1858 }
1859
1860 if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) &&
1861 cichk_oop->holder_klass()->is_loader_alive(is_alive)) {
1862 return;
1863 }
1864 } else {
1865 Metadata* ic_oop = ic->cached_metadata();
1866 if (ic_oop != NULL) {
1867 if (mark_on_stack) {
1868 Metadata::mark_on_stack(ic_oop);
1869 }
1870
1871 if (ic_oop->is_klass()) {
1872 if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
1873 return;
1874 }
1875 } else if (ic_oop->is_method()) {
1876 if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
1877 return;
1878 }
1879 } else {
1880 ShouldNotReachHere();
1881 }
1882 }
1883 }
1884
1885 ic->set_to_clean();
1886 }
1887
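
Note: clean_ic_if_metadata_is_dead() collapses the inline-cache cleaning that previously sat inline in do_unloading(); it returns early whenever the cached ICHolder, Klass or Method can still be proven alive, and otherwise resets the IC. The two call shapes used later in this change (serial path first, then the parallel path, which threads the class-redefinition flag through so metadata is recorded while the ICs are cleaned):

    // Serial do_unloading() path:
    clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive, false);

    // Parallel do_unloading_parallel() path:
    clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive, mark_metadata_on_stack);
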
1740 // This is called at the end of the strong tracing/marking phase of a 1888 // This is called at the end of the strong tracing/marking phase of a
1741 // GC to unload an nmethod if it contains otherwise unreachable 1889 // GC to unload an nmethod if it contains otherwise unreachable
1742 // oops. 1890 // oops.
1743 1891
1744 void nmethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) { 1892 void nmethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
1788 } 1936 }
1789 } 1937 }
1790 #endif 1938 #endif
1791 1939
1792 // Exception cache 1940 // Exception cache
1793 ExceptionCache* ec = exception_cache(); 1941 clean_exception_cache(is_alive);
1794 while (ec != NULL) {
1795 Klass* ex_klass = ec->exception_type();
1796 ExceptionCache* next_ec = ec->next();
1797 if (ex_klass != NULL && !ex_klass->is_loader_alive(is_alive)) {
1798 remove_from_exception_cache(ec);
1799 }
1800 ec = next_ec;
1801 }
1802 1942
1803 // If class unloading occurred we first iterate over all inline caches and 1943 // If class unloading occurred we first iterate over all inline caches and
1804 // clear ICs where the cached oop is referring to an unloaded klass or method. 1944 // clear ICs where the cached oop is referring to an unloaded klass or method.
1805 // The remaining live cached oops will be traversed in the relocInfo::oop_type 1945 // The remaining live cached oops will be traversed in the relocInfo::oop_type
1806 // iteration below. 1946 // iteration below.
1807 if (unloading_occurred) { 1947 if (unloading_occurred) {
1808 RelocIterator iter(this, low_boundary); 1948 RelocIterator iter(this, low_boundary);
1809 while(iter.next()) { 1949 while(iter.next()) {
1810 if (iter.type() == relocInfo::virtual_call_type) { 1950 if (iter.type() == relocInfo::virtual_call_type) {
1811 CompiledIC *ic = CompiledIC_at(iter.reloc()); 1951 CompiledIC *ic = CompiledIC_at(&iter);
1812 if (ic->is_icholder_call()) { 1952 clean_ic_if_metadata_is_dead(ic, is_alive, false);
1813 // The only exception is compiledICHolder oops which may
1814 // yet be marked below. (We check this further below).
1815 CompiledICHolder* cichk_oop = ic->cached_icholder();
1816 if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) &&
1817 cichk_oop->holder_klass()->is_loader_alive(is_alive)) {
1818 continue;
1819 }
1820 } else {
1821 Metadata* ic_oop = ic->cached_metadata();
1822 if (ic_oop != NULL) {
1823 if (ic_oop->is_klass()) {
1824 if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
1825 continue;
1826 }
1827 } else if (ic_oop->is_method()) {
1828 if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
1829 continue;
1830 }
1831 } else {
1832 ShouldNotReachHere();
1833 }
1834 }
1835 }
1836 ic->set_to_clean();
1837 } 1953 }
1838 } 1954 }
1839 } 1955 }
1840 1956
1841 // Compiled code 1957 // Compiled code
1869 1985
1870 // Ensure that all metadata is still alive 1986 // Ensure that all metadata is still alive
1871 verify_metadata_loaders(low_boundary, is_alive); 1987 verify_metadata_loaders(low_boundary, is_alive);
1872 } 1988 }
1873 1989
1990 template <class CompiledICorStaticCall>
1991 static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, BoolObjectClosure *is_alive, nmethod* from) {
1992 // Ok, to lookup references to zombies here
1993 CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
1994 if (cb != NULL && cb->is_nmethod()) {
1995 nmethod* nm = (nmethod*)cb;
1996
1997 if (nm->unloading_clock() != nmethod::global_unloading_clock()) {
1998 // The nmethod has not been processed yet.
1999 return true;
2000 }
2001
2002 // Clean inline caches pointing to both zombie and not_entrant methods
2003 if (!nm->is_in_use() || (nm->method()->code() != nm)) {
2004 ic->set_to_clean();
2005 assert(ic->is_clean(), err_msg("nmethod " PTR_FORMAT " not clean %s", from, from->method()->name_and_sig_as_C_string()));
2006 }
2007 }
2008
2009 return false;
2010 }
2011
2012 static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, BoolObjectClosure *is_alive, nmethod* from) {
2013 return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), is_alive, from);
2014 }
2015
2016 static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, BoolObjectClosure *is_alive, nmethod* from) {
2017 return clean_if_nmethod_is_unloaded(csc, csc->destination(), is_alive, from);
2018 }
2019
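
Note: clean_if_nmethod_is_unloaded() keeps one template for the shared logic and adds two thin overloads that differ only in how the call's resolved target is obtained (ic_destination() for a CompiledIC, destination() for a CompiledStaticCall). A self-contained sketch of that overload-dispatch idiom with hypothetical types, purely to show the shape:

    struct VirtualCall { void* dest; void* ic_destination() const { return dest; } };
    struct StaticCall  { void* dest; void* destination()    const { return dest; } };

    template <class CallT>
    static bool clean_if_target_dead(CallT* call, void* target) {
      // shared logic would examine 'target' here; the sketch just tests it
      (void)call;
      return target == 0;
    }

    static bool clean_if_target_dead(VirtualCall* c) { return clean_if_target_dead(c, c->ic_destination()); }
    static bool clean_if_target_dead(StaticCall*  c) { return clean_if_target_dead(c, c->destination()); }
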
2020 bool nmethod::unload_if_dead_at(RelocIterator* iter_at_oop, BoolObjectClosure *is_alive, bool unloading_occurred) {
2021 assert(iter_at_oop->type() == relocInfo::oop_type, "Wrong relocation type");
2022
2023 oop_Relocation* r = iter_at_oop->oop_reloc();
2024 // Traverse those oops directly embedded in the code.
2025 // Other oops (oop_index>0) are seen as part of scopes_oops.
2026 assert(1 == (r->oop_is_immediate()) +
2027 (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
2028 "oop must be found in exactly one place");
2029 if (r->oop_is_immediate() && r->oop_value() != NULL) {
2030 // Unload this nmethod if the oop is dead.
2031 if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
2032 return true;
2033 }
2034 }
2035
2036 return false;
2037 }
2038
2039 void nmethod::mark_metadata_on_stack_at(RelocIterator* iter_at_metadata) {
2040 assert(iter_at_metadata->type() == relocInfo::metadata_type, "Wrong relocation type");
2041
2042 metadata_Relocation* r = iter_at_metadata->metadata_reloc();
2043 // In this metadata, we must only follow those metadatas directly embedded in
2044 // the code. Other metadatas (oop_index>0) are seen as part of
2045 // the metadata section below.
2046 assert(1 == (r->metadata_is_immediate()) +
2047 (r->metadata_addr() >= metadata_begin() && r->metadata_addr() < metadata_end()),
2048 "metadata must be found in exactly one place");
2049 if (r->metadata_is_immediate() && r->metadata_value() != NULL) {
2050 Metadata* md = r->metadata_value();
2051 if (md != _method) Metadata::mark_on_stack(md);
2052 }
2053 }
2054
2055 void nmethod::mark_metadata_on_stack_non_relocs() {
2056 // Visit the metadata section
2057 for (Metadata** p = metadata_begin(); p < metadata_end(); p++) {
2058 if (*p == Universe::non_oop_word() || *p == NULL) continue; // skip non-oops
2059 Metadata* md = *p;
2060 Metadata::mark_on_stack(md);
2061 }
2062
2063 // Visit metadata not embedded in the other places.
2064 if (_method != NULL) Metadata::mark_on_stack(_method);
2065 }
2066
2067 bool nmethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
2068 ResourceMark rm;
2069
2070 // Make sure the oop's ready to receive visitors
2071 assert(!is_zombie() && !is_unloaded(),
2072 "should not call follow on zombie or unloaded nmethod");
2073
2074 // If the method is not entrant then a JMP is plastered over the
2075 // first few bytes. If an oop in the old code was there, that oop
2076 // should not get GC'd. Skip the first few bytes of oops on
2077 // not-entrant methods.
2078 address low_boundary = verified_entry_point();
2079 if (is_not_entrant()) {
2080 low_boundary += NativeJump::instruction_size;
2081 // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
2082 // (See comment above.)
2083 }
2084
2085 // The RedefineClasses() API can cause the class unloading invariant
2086 // to no longer be true. See jvmtiExport.hpp for details.
2087 // Also, leave a debugging breadcrumb in local flag.
2088 bool a_class_was_redefined = JvmtiExport::has_redefined_a_class();
2089 if (a_class_was_redefined) {
2090 // This set of the unloading_occurred flag is done before the
2091 // call to post_compiled_method_unload() so that the unloading
2092 // of this nmethod is reported.
2093 unloading_occurred = true;
2094 }
2095
2096 // When class redefinition is used all metadata in the CodeCache has to be recorded,
2097 // so that unused "previous versions" can be purged. Since walking the CodeCache can
2098 // be expensive, the "mark on stack" is piggy-backed on this parallel unloading code.
2099 bool mark_metadata_on_stack = a_class_was_redefined;
2100
2101 // Exception cache
2102 clean_exception_cache(is_alive);
2103
2104 bool is_unloaded = false;
2105 bool postponed = false;
2106
2107 RelocIterator iter(this, low_boundary);
2108 while(iter.next()) {
2109
2110 switch (iter.type()) {
2111
2112 case relocInfo::virtual_call_type:
2113 if (unloading_occurred) {
2114 // If class unloading occurred we first iterate over all inline caches and
2115 // clear ICs where the cached oop is referring to an unloaded klass or method.
2116 clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive, mark_metadata_on_stack);
2117 }
2118
2119 postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
2120 break;
2121
2122 case relocInfo::opt_virtual_call_type:
2123 postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
2124 break;
2125
2126 case relocInfo::static_call_type:
2127 postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
2128 break;
2129
2130 case relocInfo::oop_type:
2131 if (!is_unloaded) {
2132 is_unloaded = unload_if_dead_at(&iter, is_alive, unloading_occurred);
2133 }
2134 break;
2135
2136 case relocInfo::metadata_type:
2137 if (mark_metadata_on_stack) {
2138 mark_metadata_on_stack_at(&iter);
2139 }
2140 }
2141 }
2142
2143 if (mark_metadata_on_stack) {
2144 mark_metadata_on_stack_non_relocs();
2145 }
2146
2147 if (is_unloaded) {
2148 return postponed;
2149 }
2150
2151 // Scopes
2152 for (oop* p = oops_begin(); p < oops_end(); p++) {
2153 if (*p == Universe::non_oop_word()) continue; // skip non-oops
2154 if (can_unload(is_alive, p, unloading_occurred)) {
2155 is_unloaded = true;
2156 break;
2157 }
2158 }
2159
2160 if (is_unloaded) {
2161 return postponed;
2162 }
2163
2164 // Ensure that all metadata is still alive
2165 verify_metadata_loaders(low_boundary, is_alive);
2166
2167 return postponed;
2168 }
2169
2170 void nmethod::do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred) {
2171 ResourceMark rm;
2172
2173 // Make sure the oop's ready to receive visitors
2174 assert(!is_zombie(),
2175 "should not call follow on zombie nmethod");
2176
2177 // If the method is not entrant then a JMP is plastered over the
2178 // first few bytes. If an oop in the old code was there, that oop
2179 // should not get GC'd. Skip the first few bytes of oops on
2180 // not-entrant methods.
2181 address low_boundary = verified_entry_point();
2182 if (is_not_entrant()) {
2183 low_boundary += NativeJump::instruction_size;
2184 // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
2185 // (See comment above.)
2186 }
2187
2188 RelocIterator iter(this, low_boundary);
2189 while(iter.next()) {
2190
2191 switch (iter.type()) {
2192
2193 case relocInfo::virtual_call_type:
2194 clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
2195 break;
2196
2197 case relocInfo::opt_virtual_call_type:
2198 clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
2199 break;
2200
2201 case relocInfo::static_call_type:
2202 clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
2203 break;
2204 }
2205 }
2206 }
2207
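
Note: taken together, do_unloading_parallel() and do_unloading_parallel_postponed() imply a two-pass protocol: bump the global clock, let (possibly several) GC workers clean each nmethod and stamp it with the new clock, then revisit the nmethods whose inline caches pointed at not-yet-stamped callees. The driver lives in the GC / CodeCache code rather than in nmethod.cpp, so the following is only a hedged sketch of that sequence; the candidates collection and the single-threaded loop are assumptions:

    void unload_nmethods_parallel(BoolObjectClosure* is_alive, bool unloading_occurred,
                                  GrowableArray<nmethod*>* candidates) {
      nmethod::increase_unloading_clock();               // start a new epoch (0 stays reserved)

      GrowableArray<nmethod*> postponed;
      for (int i = 0; i < candidates->length(); i++) {   // in practice split across GC workers
        nmethod* nm = candidates->at(i);
        if (nm->do_unloading_parallel(is_alive, unloading_occurred)) {
          postponed.append(nm);                          // a callee had not been processed yet
        }
        nm->set_unloading_clock(nmethod::global_unloading_clock());
      }

      for (int i = 0; i < postponed.length(); i++) {     // second pass, now every nmethod is stamped
        postponed.at(i)->do_unloading_parallel_postponed(is_alive, unloading_occurred);
      }
    }
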
1874 #ifdef ASSERT 2208 #ifdef ASSERT
1875 2209
1876 class CheckClass : AllStatic { 2210 class CheckClass : AllStatic {
1877 static BoolObjectClosure* _is_alive; 2211 static BoolObjectClosure* _is_alive;
1878 2212
1915 // static_stub_Relocations may have dangling references to 2249 // static_stub_Relocations may have dangling references to
1916 // Method*s so trim them out here. Otherwise it looks like 2250 // Method*s so trim them out here. Otherwise it looks like
1917 // compiled code is maintaining a link to dead metadata. 2251 // compiled code is maintaining a link to dead metadata.
1918 address static_call_addr = NULL; 2252 address static_call_addr = NULL;
1919 if (iter.type() == relocInfo::opt_virtual_call_type) { 2253 if (iter.type() == relocInfo::opt_virtual_call_type) {
1920 CompiledIC* cic = CompiledIC_at(iter.reloc()); 2254 CompiledIC* cic = CompiledIC_at(&iter);
1921 if (!cic->is_call_to_interpreted()) { 2255 if (!cic->is_call_to_interpreted()) {
1922 static_call_addr = iter.addr(); 2256 static_call_addr = iter.addr();
1923 } 2257 }
1924 } else if (iter.type() == relocInfo::static_call_type) { 2258 } else if (iter.type() == relocInfo::static_call_type) {
1925 CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc()); 2259 CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
1955 // Visit all immediate references that are embedded in the instruction stream. 2289 // Visit all immediate references that are embedded in the instruction stream.
1956 RelocIterator iter(this, low_boundary); 2290 RelocIterator iter(this, low_boundary);
1957 while (iter.next()) { 2291 while (iter.next()) {
1958 if (iter.type() == relocInfo::metadata_type ) { 2292 if (iter.type() == relocInfo::metadata_type ) {
1959 metadata_Relocation* r = iter.metadata_reloc(); 2293 metadata_Relocation* r = iter.metadata_reloc();
1960 // In this lmetadata, we must only follow those metadatas directly embedded in 2294 // In this metadata, we must only follow those metadatas directly embedded in
1961 // the code. Other metadatas (oop_index>0) are seen as part of 2295 // the code. Other metadatas (oop_index>0) are seen as part of
1962 // the metadata section below. 2296 // the metadata section below.
1963 assert(1 == (r->metadata_is_immediate()) + 2297 assert(1 == (r->metadata_is_immediate()) +
1964 (r->metadata_addr() >= metadata_begin() && r->metadata_addr() < metadata_end()), 2298 (r->metadata_addr() >= metadata_begin() && r->metadata_addr() < metadata_end()),
1965 "metadata must be found in exactly one place"); 2299 "metadata must be found in exactly one place");
1967 Metadata* md = r->metadata_value(); 2301 Metadata* md = r->metadata_value();
1968 f(md); 2302 f(md);
1969 } 2303 }
1970 } else if (iter.type() == relocInfo::virtual_call_type) { 2304 } else if (iter.type() == relocInfo::virtual_call_type) {
1971 // Check compiledIC holders associated with this nmethod 2305 // Check compiledIC holders associated with this nmethod
1972 CompiledIC *ic = CompiledIC_at(iter.reloc()); 2306 CompiledIC *ic = CompiledIC_at(&iter);
1973 if (ic->is_icholder_call()) { 2307 if (ic->is_icholder_call()) {
1974 CompiledICHolder* cichk = ic->cached_icholder(); 2308 CompiledICHolder* cichk = ic->cached_icholder();
1975 f(cichk->holder_method()); 2309 f(cichk->holder_method());
1976 f(cichk->holder_klass()); 2310 f(cichk->holder_klass());
1977 } else { 2311 } else {
1989 if (*p == Universe::non_oop_word() || *p == NULL) continue; // skip non-oops 2323 if (*p == Universe::non_oop_word() || *p == NULL) continue; // skip non-oops
1990 Metadata* md = *p; 2324 Metadata* md = *p;
1991 f(md); 2325 f(md);
1992 } 2326 }
1993 2327
1994 // Call function Method*, not embedded in these other places. 2328 // Visit metadata not embedded in the other places.
1995 if (_method != NULL) f(_method); 2329 if (_method != NULL) f(_method);
1996 } 2330 }
1997 2331
1998 void nmethod::oops_do(OopClosure* f, bool allow_zombie) { 2332 void nmethod::oops_do(OopClosure* f, bool allow_zombie) {
1999 // make sure the oops ready to receive visitors 2333 // make sure the oops ready to receive visitors
2094 nmethod* cur = _oops_do_mark_nmethods; 2428 nmethod* cur = _oops_do_mark_nmethods;
2095 while (cur != NMETHOD_SENTINEL) { 2429 while (cur != NMETHOD_SENTINEL) {
2096 assert(cur != NULL, "not NULL-terminated"); 2430 assert(cur != NULL, "not NULL-terminated");
2097 nmethod* next = cur->_oops_do_mark_link; 2431 nmethod* next = cur->_oops_do_mark_link;
2098 cur->_oops_do_mark_link = NULL; 2432 cur->_oops_do_mark_link = NULL;
2099 cur->fix_oop_relocations(); 2433 cur->verify_oop_relocations();
2100 NOT_PRODUCT(if (TraceScavenge) cur->print_on(tty, "oops_do, unmark")); 2434 NOT_PRODUCT(if (TraceScavenge) cur->print_on(tty, "oops_do, unmark"));
2101 cur = next; 2435 cur = next;
2102 } 2436 }
2103 void* required = _oops_do_mark_nmethods; 2437 void* required = _oops_do_mark_nmethods;
2104 void* observed = Atomic::cmpxchg_ptr(NULL, &_oops_do_mark_nmethods, required); 2438 void* observed = Atomic::cmpxchg_ptr(NULL, &_oops_do_mark_nmethods, required);
2636 } 2970 }
2637 virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); } 2971 virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
2638 }; 2972 };
2639 2973
2640 void nmethod::verify_scavenge_root_oops() { 2974 void nmethod::verify_scavenge_root_oops() {
2975 if (UseG1GC) {
2976 return;
2977 }
2978
2641 if (!on_scavenge_root_list()) { 2979 if (!on_scavenge_root_list()) {
2642 // Actually look inside, to verify the claim that it's clean. 2980 // Actually look inside, to verify the claim that it's clean.
2643 DebugScavengeRoot debug_scavenge_root(this); 2981 DebugScavengeRoot debug_scavenge_root(this);
2644 oops_do(&debug_scavenge_root); 2982 oops_do(&debug_scavenge_root);
2645 if (!debug_scavenge_root.ok()) 2983 if (!debug_scavenge_root.ok())
3084 while (iter.next()) { 3422 while (iter.next()) {
3085 switch (iter.type()) { 3423 switch (iter.type()) {
3086 case relocInfo::virtual_call_type: 3424 case relocInfo::virtual_call_type:
3087 case relocInfo::opt_virtual_call_type: { 3425 case relocInfo::opt_virtual_call_type: {
3088 VerifyMutexLocker mc(CompiledIC_lock); 3426 VerifyMutexLocker mc(CompiledIC_lock);
3089 CompiledIC_at(iter.reloc())->print(); 3427 CompiledIC_at(&iter)->print();
3090 break; 3428 break;
3091 } 3429 }
3092 case relocInfo::static_call_type: 3430 case relocInfo::static_call_type:
3093 st->print_cr("Static call at " INTPTR_FORMAT, iter.reloc()->addr()); 3431 st->print_cr("Static call at " INTPTR_FORMAT, iter.reloc()->addr());
3094 compiledStaticCall_at(iter.reloc())->print(); 3432 compiledStaticCall_at(iter.reloc())->print();