comparison src/share/vm/code/nmethod.cpp @ 12355:cefad50507d8

Merge with hs25-b53
author Gilles Duboscq <duboscq@ssw.jku.at>
date Fri, 11 Oct 2013 10:38:03 +0200
parents d0aeaf72c7bd 268e7a2178d7
children 359f7e70ae7f
comparing 12058:ccb4f2af2319 with 12355:cefad50507d8
@@ -1,7 +1,7 @@
 /*
- * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 only, as
  * published by the Free Software Foundation.
@@ -94,27 +94,30 @@
 #define DTRACE_METHOD_UNLOAD_PROBE(method)

 #endif

 bool nmethod::is_compiled_by_c1() const {
-  if (compiler() == NULL || method() == NULL) return false; // can happen during debug printing
-  if (is_native_method()) return false;
+  if (compiler() == NULL) {
+    return false;
+  }
   return compiler()->is_c1();
 }
 bool nmethod::is_compiled_by_graal() const {
   if (compiler() == NULL || method() == NULL) return false; // can happen during debug printing
   if (is_native_method()) return false;
   return compiler()->is_graal();
 }
 bool nmethod::is_compiled_by_c2() const {
-  if (compiler() == NULL || method() == NULL) return false; // can happen during debug printing
-  if (is_native_method()) return false;
+  if (compiler() == NULL) {
+    return false;
+  }
   return compiler()->is_c2();
 }
 bool nmethod::is_compiled_by_shark() const {
-  if (is_native_method()) return false;
-  assert(compiler() != NULL, "must be");
+  if (compiler() == NULL) {
+    return false;
+  }
   return compiler()->is_shark();
 }



@@ -463,11 +466,10 @@
 // Fill in default values for various flag fields
 void nmethod::init_defaults() {
   _state = alive;
   _marked_for_reclamation = 0;
   _has_flushed_dependencies = 0;
-  _speculatively_disconnected = 0;
   _has_unsafe_access = 0;
   _has_method_handle_invokes = 0;
   _lazy_critical_native = 0;
   _has_wide_vectors = 0;
   _marked_for_deoptimization = 0;
@@ -482,11 +484,10 @@
   _oops_do_mark_link = NULL;
   _jmethod_id = NULL;
   _osr_link = NULL;
   _scavenge_root_link = NULL;
   _scavenge_root_state = 0;
-  _saved_nmethod_link = NULL;
   _compiler = NULL;
 #ifdef GRAAL
   _graal_installed_code = NULL;
   _triggered_deoptimizations = NULL;
 #endif
@@ -703,14 +704,16 @@
     _entry_point = code_begin() + offsets->value(CodeOffsets::Entry);
     _verified_entry_point = code_begin() + offsets->value(CodeOffsets::Verified_Entry);
     _osr_entry_point = NULL;
     _exception_cache = NULL;
     _pc_desc_cache.reset_to(NULL);
+    _hotness_counter = NMethodSweeper::hotness_counter_reset_val();

     code_buffer->copy_values_to(this);
     if (ScavengeRootsInCode && detect_scavenge_root_oops()) {
       CodeCache::add_scavenge_root_nmethod(this);
+      Universe::heap()->register_nmethod(this);
     }
     debug_only(verify_scavenge_root_oops());
     CodeCache::commit(this);
   }

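The constructors now pair CodeCache::add_scavenge_root_nmethod() with Universe::heap()->register_nmethod(this); the matching unregister_nmethod() call appears later, in make_not_entrant_or_zombie(). As a rough illustration of that register/unregister lifecycle only — the Toy* names below are invented, and only the two register/unregister calls correspond to anything in this changeset:

#include <unordered_set>

// Toy stand-ins; only the register/unregister pairing mirrors the diff.
struct ToyNMethod {};

class ToyHeap {
  std::unordered_set<ToyNMethod*> _code_roots;  // compiled code the GC must know about
public:
  void register_nmethod(ToyNMethod* nm)   { _code_roots.insert(nm); }
  void unregister_nmethod(ToyNMethod* nm) { _code_roots.erase(nm); }
  bool knows(ToyNMethod* nm) const        { return _code_roots.count(nm) != 0; }
};

int main() {
  ToyHeap heap;
  ToyNMethod nm;
  heap.register_nmethod(&nm);    // done at nmethod creation in the diff
  // ... nmethod is alive and may hold scavengable oops ...
  heap.unregister_nmethod(&nm);  // done on the transition to zombie in the diff
  return heap.knows(&nm) ? 1 : 0;
}

The point of the pairing is simply that anything the heap was told about at creation must be dropped again before the nmethod reaches a state in which it is unsafe to scan.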
@@ -786,10 +789,11 @@
     _entry_point = code_begin() + offsets->value(CodeOffsets::Entry);
     _verified_entry_point = code_begin() + offsets->value(CodeOffsets::Verified_Entry);
     _osr_entry_point = NULL;
     _exception_cache = NULL;
     _pc_desc_cache.reset_to(NULL);
+    _hotness_counter = NMethodSweeper::hotness_counter_reset_val();

     code_buffer->copy_values_to(this);
     debug_only(verify_scavenge_root_oops());
     CodeCache::commit(this);
   }
@@ -819,11 +823,11 @@
     }
   }
 }
 #endif // def HAVE_DTRACE_H

-void* nmethod::operator new(size_t size, int nmethod_size) throw () {
+void* nmethod::operator new(size_t size, int nmethod_size) throw() {
   // Not critical, may return null if there is too little continuous memory
   return CodeCache::allocate(nmethod_size);
 }

 nmethod::nmethod(
@@ -863,10 +867,11 @@
     _entry_bci = entry_bci;
     _compile_id = compile_id;
     _comp_level = comp_level;
     _compiler = compiler;
     _orig_pc_offset = orig_pc_offset;
+    _hotness_counter = NMethodSweeper::hotness_counter_reset_val();

     // Section offsets
     _consts_offset = content_offset() + code_buffer->total_offset_of(code_buffer->consts());
     _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());

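All three constructors now seed _hotness_counter from NMethodSweeper::hotness_counter_reset_val(). The changeset only shows the initialization; the sketch below is a guess at the surrounding idea — a counter that starts at the reset value and is decayed by a sweeper so that cold code can be identified — with every name except hotness_counter_reset_val() invented:

// Toy model of a reset-on-creation hotness value; not HotSpot's sweeper.
struct ToySweeper {
  static int hotness_counter_reset_val() { return 100; }  // arbitrary value
};

struct ToyCompiledMethod {
  int _hotness_counter;
  ToyCompiledMethod() : _hotness_counter(ToySweeper::hotness_counter_reset_val()) {}

  void seen_on_stack()    { _hotness_counter = ToySweeper::hotness_counter_reset_val(); }
  void swept()            { if (_hotness_counter > 0) _hotness_counter--; }  // decay per sweep
  bool looks_cold() const { return _hotness_counter == 0; }
};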
@@ -933,10 +938,11 @@
     code_buffer->copy_values_to(this);
     debug_info->copy_to(this);
     dependencies->copy_to(this);
     if (ScavengeRootsInCode && detect_scavenge_root_oops()) {
       CodeCache::add_scavenge_root_nmethod(this);
+      Universe::heap()->register_nmethod(this);
     }
     debug_only(verify_scavenge_root_oops());

     CodeCache::commit(this);

@@ -1227,11 +1233,11 @@
   }
 }

 // This is a private interface with the sweeper.
 void nmethod::mark_as_seen_on_stack() {
-  assert(is_not_entrant(), "must be a non-entrant method");
+  assert(is_alive(), "Must be an alive method");
   // Set the traversal mark to ensure that the sweeper does 2
   // cleaning passes before moving to zombie.
   set_stack_traversal_mark(NMethodSweeper::traversal_count());
 }

@@ -1323,11 +1329,11 @@
   // The Method* is gone at this point
   assert(_method == NULL, "Tautology");

   set_osr_link(NULL);
   //set_scavenge_root_link(NULL); // done by prune_scavenge_root_nmethods
-  NMethodSweeper::notify(this);
+  NMethodSweeper::notify();
 }

 void nmethod::invalidate_osr_method() {
   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
   // Remove from list of active nmethods
@@ -1367,10 +1373,17 @@
   // Make sure neither the nmethod nor the method is flushed in case of a safepoint in code below.
   nmethodLocker nml(this);
   methodHandle the_method(method());
   No_Safepoint_Verifier nsv;

+  // during patching, depending on the nmethod state we must notify the GC that
+  // code has been unloaded, unregistering it. We cannot do this right while
+  // holding the Patching_lock because we need to use the CodeCache_lock. This
+  // would be prone to deadlocks.
+  // This flag is used to remember whether we need to later lock and unregister.
+  bool nmethod_needs_unregister = false;
+
   {
     // invalidate osr nmethod before acquiring the patching lock since
     // they both acquire leaf locks and we don't want a deadlock.
     // This logic is equivalent to the logic below for patching the
     // verified entry point of regular methods.
@@ -1404,10 +1417,26 @@

     if (is_in_use()) {
       // It's a true state change, so mark the method as decompiled.
       // Do it only for transition from alive.
       inc_decompile_count();
+    }
+
+    // If the state is becoming a zombie, signal to unregister the nmethod with
+    // the heap.
+    // This nmethod may have already been unloaded during a full GC.
+    if ((state == zombie) && !is_unloaded()) {
+      nmethod_needs_unregister = true;
+    }
+
+    // Must happen before state change. Otherwise we have a race condition in
+    // nmethod::can_not_entrant_be_converted(). I.e., a method can immediately
+    // transition its state from 'not_entrant' to 'zombie' without having to wait
+    // for stack scanning.
+    if (state == not_entrant) {
+      mark_as_seen_on_stack();
+      OrderAccess::storestore();
     }

     // Change state
     _state = state;

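The new block at lines 1431-1437 is an ordering requirement: the traversal mark written by mark_as_seen_on_stack() must become visible before the store that publishes the not_entrant state, which is what OrderAccess::storestore() enforces; otherwise can_not_entrant_be_converted() could observe the new state without the mark and convert the nmethod to a zombie too early. A portable sketch of the same mark-then-publish idiom, using std::atomic release/acquire as a stand-in for HotSpot's OrderAccess barriers (types and names below are illustrative, not HotSpot's):

#include <atomic>

// Sketch only: publish "not entrant" after the traversal mark is in place.
// The release store plays the role of OrderAccess::storestore() in the diff.
struct ToyState {
  long _stack_traversal_mark = 0;
  std::atomic<int> _state{0};  // 0 = in_use, 1 = not_entrant

  void make_not_entrant(long current_traversal) {
    _stack_traversal_mark = current_traversal;   // mark first
    _state.store(1, std::memory_order_release);  // then publish the state
  }

  // Sweeper-like reader: observing not_entrant implies the mark is visible,
  // so the "seen on a stack recently?" test below cannot be skipped.
  bool can_convert_to_zombie(long current_traversal) const {
    if (_state.load(std::memory_order_acquire) != 1) return false;
    return _stack_traversal_mark + 2 <= current_traversal;  // illustrative threshold
  }
};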
@@ -1424,15 +1453,10 @@
     if (method() != NULL && (method()->code() == this ||
                              method()->from_compiled_entry() == verified_entry_point())) {
       HandleMark hm;
       method()->clear_code();
     }
-
-    if (state == not_entrant) {
-      mark_as_seen_on_stack();
-    }
-
   } // leave critical region under Patching_lock

   // When the nmethod becomes zombie it is no longer alive so the
   // dependencies must be flushed. nmethods in the not_entrant
   // state will be flushed later when the transition to zombie
@@ -1441,10 +1465,13 @@
   {
     // Flushing dependecies must be done before any possible
     // safepoint can sneak in, otherwise the oops used by the
     // dependency logic could have become stale.
     MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
+    if (nmethod_needs_unregister) {
+      Universe::heap()->unregister_nmethod(this);
+    }
     flush_dependencies(NULL);
   }

   // zombie only - if a JVMTI agent has enabled the CompiledMethodUnload
   // event and it hasn't already been reported for this nmethod then
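The nmethod_needs_unregister flag declared at new lines 1378-1383 and consumed at new lines 1470-1472 is a small deadlock-avoidance pattern: while the Patching_lock is held only a boolean decision is recorded, and the actual Universe::heap()->unregister_nmethod(this) call is deferred until the code runs under CodeCache_lock. A generic sketch of that shape, with standard-library locks standing in for HotSpot's (nothing below except the two heap calls named in the diff is HotSpot API):

#include <mutex>

std::mutex patching_lock;    // stand-in for Patching_lock
std::mutex code_cache_lock;  // stand-in for CodeCache_lock

void make_zombie_sketch(bool becomes_zombie, bool already_unloaded) {
  bool needs_unregister = false;

  {
    std::lock_guard<std::mutex> pl(patching_lock);
    // State-transition work happens here. Taking code_cache_lock inside this
    // region is what the new comment warns would be prone to deadlocks, so
    // only the decision is recorded.
    if (becomes_zombie && !already_unloaded) {
      needs_unregister = true;
    }
  }

  {
    std::lock_guard<std::mutex> cl(code_cache_lock);
    if (needs_unregister) {
      // Universe::heap()->unregister_nmethod(this) in the real code,
      // followed by flush_dependencies(NULL) under the same lock.
    }
  }
}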
@@ -1456,21 +1483,24 @@
 #ifdef ASSERT
     // It's no longer safe to access the oops section since zombie
     // nmethods aren't scanned for GC.
     _oops_are_stale = true;
 #endif
+    // the Method may be reclaimed by class unloading now that the
+    // nmethod is in zombie state
+    set_method(NULL);
   } else {
     assert(state == not_entrant, "other cases may need to be handled differently");
   }

   if (TraceCreateZombies) {
     ResourceMark m;
     tty->print_cr("nmethod <" INTPTR_FORMAT "> %s code made %s", this, this->method()->name_and_sig_as_C_string(), (state == not_entrant) ? "not entrant" : "zombie");
   }

   // Make sweeper aware that there is a zombie method that needs to be removed
-  NMethodSweeper::notify(this);
+  NMethodSweeper::notify();

   return true;
 }

 void nmethod::flush() {
@@ -1499,14 +1529,10 @@
     ec = next;
   }

   if (on_scavenge_root_list()) {
     CodeCache::drop_scavenge_root_nmethod(this);
-  }
-
-  if (is_speculatively_disconnected()) {
-    CodeCache::remove_saved_code(this);
   }

 #ifdef SHARK
   ((SharkCompiler *) compiler())->free_compiled_method(insts_begin());
 #endif // SHARK
@@ -1906,25 +1932,14 @@

   // Call function Method*, not embedded in these other places.
   if (_method != NULL) f(_method);
 }

-
-// This method is called twice during GC -- once while
-// tracing the "active" nmethods on thread stacks during
-// the (strong) marking phase, and then again when walking
-// the code cache contents during the weak roots processing
-// phase. The two uses are distinguished by means of the
-// 'do_strong_roots_only' flag, which is true in the first
-// case. We want to walk the weak roots in the nmethod
-// only in the second case. The weak roots in the nmethod
-// are the oops in the ExceptionCache and the InlineCache
-// oops.
-void nmethod::oops_do(OopClosure* f, bool do_strong_roots_only) {
+void nmethod::oops_do(OopClosure* f, bool allow_zombie) {
   // make sure the oops ready to receive visitors
-  assert(!is_zombie() && !is_unloaded(),
-         "should not call follow on zombie or unloaded nmethod");
+  assert(allow_zombie || !is_zombie(), "should not call follow on zombie nmethod");
+  assert(!is_unloaded(), "should not call follow on unloaded nmethod");

   // If the method is not entrant or zombie then a JMP is plastered over the
   // first few bytes. If an oop in the old code was there, that oop
   // should not get GC'd. Skip the first few bytes of oops on
   // not-entrant methods.
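oops_do() now takes an allow_zombie flag and splits the old combined assert in two, so a caller that deliberately visits zombie nmethods is tolerated while unloaded nmethods remain off limits. A hypothetical call site — the CountOops closure and the commented example are invented; OopClosure, do_oop(oop*), do_oop(narrowOop*) and the new oops_do(OopClosure*, bool) signature are the ones visible in this file:

// Hypothetical closure, in the style of the verification closures below.
class CountOops : public OopClosure {
  int _count;
public:
  CountOops() : _count(0) {}
  virtual void do_oop(oop* p)       { if (*p != NULL) _count++; }
  virtual void do_oop(narrowOop* p) { _count++; }
  int count() const { return _count; }
};

// Invented example: a walker that is allowed to touch zombie nmethods.
// void count_embedded_oops(nmethod* nm) {
//   CountOops cl;
//   nm->oops_do(&cl, /* allow_zombie */ true);  // passes the relaxed assert
// }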
@@ -2049,11 +2064,11 @@
   void maybe_print(oop* p) {
     if (_print_nm == NULL) return;
     if (!_detected_scavenge_root) _print_nm->print_on(tty, "new scavenge root");
     tty->print_cr(""PTR_FORMAT"[offset=%d] detected scavengable oop "PTR_FORMAT" (found at "PTR_FORMAT")",
                   _print_nm, (int)((intptr_t)p - (intptr_t)_print_nm),
-                  (intptr_t)(*p), (intptr_t)p);
+                  (void *)(*p), (intptr_t)p);
     (*p)->print();
   }
 #endif //PRODUCT
 };

@@ -2429,11 +2444,11 @@
     if (_ok) {
       _nm->print_nmethod(true);
       _ok = false;
     }
     tty->print_cr("*** non-oop "PTR_FORMAT" found at "PTR_FORMAT" (offset %d)",
-                  (intptr_t)(*p), (intptr_t)p, (int)((intptr_t)p - (intptr_t)_nm));
+                  (void *)(*p), (intptr_t)p, (int)((intptr_t)p - (intptr_t)_nm));
   }
   virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
 };

 void nmethod::verify() {
@@ -2551,11 +2566,11 @@
     if (_ok) {
       _nm->print_nmethod(true);
       _ok = false;
     }
     tty->print_cr("*** scavengable oop "PTR_FORMAT" found at "PTR_FORMAT" (offset %d)",
-                  (intptr_t)(*p), (intptr_t)p, (int)((intptr_t)p - (intptr_t)_nm));
+                  (void *)(*p), (intptr_t)p, (int)((intptr_t)p - (intptr_t)_nm));
     (*p)->print();
   }
   virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
 };
