comparison src/share/vm/code/nmethod.cpp @ 10408:836a62f43af9

Merge with http://hg.openjdk.java.net/hsx/hsx25/hotspot/
author Doug Simon <doug.simon@oracle.com>
date Wed, 19 Jun 2013 10:45:56 +0200
parents 4a7dc38ae96b 28e5aed7f3a6
children 3489047ffea2
comparing 10086:e0fb8a213650 with 10408:836a62f43af9
@@ -508,22 +508,21 @@
   // create nmethod
   nmethod* nm = NULL;
   {
     MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
     int native_nmethod_size = allocation_size(code_buffer, sizeof(nmethod));
-    if (CodeCache::has_space(native_nmethod_size)) {
-      CodeOffsets offsets;
-      offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
-      offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
-      nm = new (native_nmethod_size) nmethod(method(), native_nmethod_size,
-                                             compile_id, &offsets,
-                                             code_buffer, frame_size,
-                                             basic_lock_owner_sp_offset,
-                                             basic_lock_sp_offset, oop_maps);
-      if (nm != NULL) nmethod_stats.note_native_nmethod(nm);
-      if (PrintAssembly && nm != NULL)
-        Disassembler::decode(nm);
-    }
+    CodeOffsets offsets;
+    offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
+    offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
+    nm = new (native_nmethod_size) nmethod(method(), native_nmethod_size,
+                                           compile_id, &offsets,
+                                           code_buffer, frame_size,
+                                           basic_lock_owner_sp_offset,
+                                           basic_lock_sp_offset, oop_maps);
+    NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_native_nmethod(nm));
+    if (PrintAssembly && nm != NULL) {
+      Disassembler::decode(nm);
+    }
   }
   // verify nmethod
   debug_only(if (nm) nm->verify();) // might block
 
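Note on this hunk: the CodeCache::has_space guard is dropped because allocation failure is now reported by nmethod::operator new returning NULL (see the operator new hunk further down), and the statistics call is wrapped in NOT_PRODUCT so it disappears from product builds. A minimal, self-contained sketch of that build-flag pattern; the macro below is a simplified stand-in for the real one in HotSpot's utilities/macros.hpp, and ToyNmethodStats is invented for the example:

    #include <cstdio>

    // Simplified stand-in for HotSpot's NOT_PRODUCT macro: in a PRODUCT build
    // the wrapped statement is compiled out entirely, otherwise it is emitted
    // verbatim.
    #ifdef PRODUCT
      #define NOT_PRODUCT(code)
    #else
      #define NOT_PRODUCT(code) code
    #endif

    struct ToyNmethodStats {
      int native_count;
      void note_native_nmethod() { ++native_count; }
    };

    static ToyNmethodStats nmethod_stats = { 0 };

    int main() {
      bool allocated = true;  // pretend the nmethod was allocated successfully
      // Mirrors the changed line: bookkeeping only happens in non-product builds.
      NOT_PRODUCT(if (allocated) nmethod_stats.note_native_nmethod();)
      printf("native nmethods noted: %d\n", nmethod_stats.native_count);
      return 0;
    }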
@@ -545,22 +544,21 @@
   // create nmethod
   nmethod* nm = NULL;
   {
     MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
     int nmethod_size = allocation_size(code_buffer, sizeof(nmethod));
-    if (CodeCache::has_space(nmethod_size)) {
-      CodeOffsets offsets;
-      offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
-      offsets.set_value(CodeOffsets::Dtrace_trap, trap_offset);
-      offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
-
-      nm = new (nmethod_size) nmethod(method(), nmethod_size,
-                                      &offsets, code_buffer, frame_size);
-
-      if (nm != NULL) nmethod_stats.note_nmethod(nm);
-      if (PrintAssembly && nm != NULL)
-        Disassembler::decode(nm);
-    }
+    CodeOffsets offsets;
+    offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
+    offsets.set_value(CodeOffsets::Dtrace_trap, trap_offset);
+    offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
+
+    nm = new (nmethod_size) nmethod(method(), nmethod_size,
+                                    &offsets, code_buffer, frame_size);
+
+    NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_nmethod(nm));
+    if (PrintAssembly && nm != NULL) {
+      Disassembler::decode(nm);
+    }
   }
   // verify nmethod
   debug_only(if (nm) nm->verify();) // might block
 
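Note on this hunk: the dtrace path gets exactly the same restructuring. The CodeOffsets object filled in both paths is essentially a small table of named entry-point offsets (Verified_Entry, Dtrace_trap, Frame_Complete) recorded before the nmethod is constructed; here is a rough, illustrative model of that idea, where SimpleCodeOffsets is an invented stand-in rather than HotSpot's actual class:

    #include <cstdio>

    // Invented stand-in for CodeOffsets: a fixed table mapping named entry
    // points to byte offsets inside the generated code blob.
    struct SimpleCodeOffsets {
      enum Entry { Verified_Entry, Dtrace_trap, Frame_Complete, max_Entries };
      int _values[max_Entries];
      SimpleCodeOffsets() { for (int i = 0; i < max_Entries; i++) _values[i] = -1; }
      void set_value(Entry e, int offset) { _values[e] = offset; }
      int  value(Entry e) const           { return _values[e]; }
    };

    int main() {
      SimpleCodeOffsets offsets;
      offsets.set_value(SimpleCodeOffsets::Verified_Entry, 0);
      offsets.set_value(SimpleCodeOffsets::Dtrace_trap,    16);
      offsets.set_value(SimpleCodeOffsets::Frame_Complete, 32);
      printf("dtrace trap at offset %d\n", offsets.value(SimpleCodeOffsets::Dtrace_trap));
      return 0;
    }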
@@ -605,46 +603,48 @@
       + round_to(dependencies->size_in_bytes() , oopSize)
       + round_to(handler_table->size_in_bytes(), oopSize)
       + round_to(nul_chk_table->size_in_bytes(), oopSize)
       + round_to(debug_info->data_size()       , oopSize)
       + leaf_graph_ids_size;
-    if (CodeCache::has_space(nmethod_size)) {
-      nm = new (nmethod_size)
-        nmethod(method(), nmethod_size, compile_id, entry_bci, offsets,
-                orig_pc_offset, debug_info, dependencies, code_buffer, frame_size,
-                oop_maps,
-                handler_table,
-                nul_chk_table,
-                compiler,
-                comp_level,
-                leaf_graph_ids
-#ifdef GRAAL
-                , installed_code,
-                triggered_deoptimizations
-#endif
-                );
-    }
+    nm = new (nmethod_size)
+      nmethod(method(), nmethod_size, compile_id, entry_bci, offsets,
+              orig_pc_offset, debug_info, dependencies, code_buffer, frame_size,
+              oop_maps,
+              handler_table,
+              nul_chk_table,
+              compiler,
+              comp_level,
+              leaf_graph_ids
+#ifdef GRAAL
+              , installed_code,
+              triggered_deoptimizations
+#endif
+              );
+
     if (nm != NULL) {
       // To make dependency checking during class loading fast, record
       // the nmethod dependencies in the classes it is dependent on.
       // This allows the dependency checking code to simply walk the
       // class hierarchy above the loaded class, checking only nmethods
       // which are dependent on those classes.  The slow way is to
       // check every nmethod for dependencies which makes it linear in
       // the number of methods compiled.  For applications with a lot
       // classes the slow way is too slow.
       for (Dependencies::DepStream deps(nm); deps.next(); ) {
         Klass* klass = deps.context_type();
-        if (klass == NULL) continue; // ignore things like evol_method
+        if (klass == NULL) {
+          continue; // ignore things like evol_method
+        }
 
         // record this nmethod as dependent on this klass
         InstanceKlass::cast(klass)->add_dependent_nmethod(nm);
       }
     }
-    if (nm != NULL) nmethod_stats.note_nmethod(nm);
-    if (PrintAssembly && nm != NULL)
-      Disassembler::decode(nm);
+    NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_nmethod(nm));
+    if (PrintAssembly && nm != NULL) {
+      Disassembler::decode(nm);
+    }
   }
 
   // verify nmethod
   debug_only(if (nm) nm->verify();) // might block
 
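Note on this hunk: apart from the dropped has_space guard and the NOT_PRODUCT/brace cleanups, the dependency-recording loop is unchanged, and its comment carries the key idea: registering the nmethod on every class it depends on makes dependency checking after class loading proportional to the affected class hierarchy rather than to the total number of compiled methods. A toy sketch of that trade-off, with all types and helpers invented for illustration:

    #include <cstdio>
    #include <string>
    #include <vector>

    // Toy model of per-class dependent-nmethod lists. Checking dependencies
    // for a newly loaded class only walks its supertype chain, not the whole
    // set of compiled methods. Names are illustrative, not HotSpot's.
    struct ToyNmethod {
      std::string name;
      explicit ToyNmethod(const std::string& n) : name(n) {}
    };

    struct ToyKlass {
      std::string              name;
      ToyKlass*                super;
      std::vector<ToyNmethod*> dependents;
      ToyKlass(const std::string& n, ToyKlass* s) : name(n), super(s) {}
      // Mirrors InstanceKlass::add_dependent_nmethod in spirit only.
      void add_dependent_nmethod(ToyNmethod* nm) { dependents.push_back(nm); }
    };

    // Re-check only the nmethods registered on the loaded class and its supers.
    static void check_dependencies(ToyKlass* loaded) {
      for (ToyKlass* k = loaded; k != NULL; k = k->super) {
        for (size_t i = 0; i < k->dependents.size(); i++) {
          printf("re-check %s against newly loaded %s\n",
                 k->dependents[i]->name.c_str(), loaded->name.c_str());
        }
      }
    }

    int main() {
      ToyKlass object("Object", NULL);
      ToyKlass list("List", &object);
      ToyNmethod nm("Foo::bar (compiled)");
      list.add_dependent_nmethod(&nm);  // done once, when the nmethod is created
      check_dependencies(&list);        // done on class loading; stays cheap
      return 0;
    }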
@@ -819,16 +819,14 @@
     }
   }
 }
 #endif // def HAVE_DTRACE_H
 
-void* nmethod::operator new(size_t size, int nmethod_size) {
-  void* alloc = CodeCache::allocate(nmethod_size);
-  guarantee(alloc != NULL, "CodeCache should have enough space");
-  return alloc;
-}
-
+void* nmethod::operator new(size_t size, int nmethod_size) throw () {
+  // Not critical, may return null if there is too little continuous memory
+  return CodeCache::allocate(nmethod_size);
+}
 
 nmethod::nmethod(
   Method* method,
   int nmethod_size,
   int compile_id,
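Note on this hunk: the guarantee is removed and the allocator is declared throw (), so a failed code-cache allocation now surfaces as a NULL new-expression that the creation paths above already test with nm != NULL. The throw() specification is what makes that well defined: for a non-throwing allocation function, a NULL return means the constructor is never run and the new-expression itself evaluates to NULL. A self-contained sketch of the pattern, with a toy arena standing in for the code cache (all names invented):

    #include <cstddef>
    #include <cstdio>

    // Toy arena standing in for the code cache.
    static char   arena[128];
    static size_t arena_used = 0;

    static void* arena_allocate(size_t bytes) {
      if (arena_used + bytes > sizeof(arena)) return NULL;  // out of space
      void* p = arena + arena_used;
      arena_used += bytes;
      return p;
    }

    struct Blob {
      int size;
      explicit Blob(int s) : size(s) {}
      // Declared throw(): a NULL return is a valid failure signal, and the
      // constructor is skipped when it happens, so the caller just sees NULL.
      void* operator new(size_t, int blob_size) throw() {
        return arena_allocate((size_t) blob_size);
      }
    };

    int main() {
      Blob* ok  = new (64)  Blob(64);   // fits in the arena
      Blob* big = new (512) Blob(512);  // too large: operator new returns NULL
      printf("first allocation %s, second allocation %s\n",
             ok  != NULL ? "succeeded" : "failed",
             big != NULL ? "succeeded" : "failed");
      return 0;
    }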
@@ -1891,20 +1889,34 @@
                "metadata must be found in exactly one place");
         if (r->metadata_is_immediate() && r->metadata_value() != NULL) {
           Metadata* md = r->metadata_value();
           f(md);
         }
+      } else if (iter.type() == relocInfo::virtual_call_type) {
+        // Check compiledIC holders associated with this nmethod
+        CompiledIC *ic = CompiledIC_at(iter.reloc());
+        if (ic->is_icholder_call()) {
+          CompiledICHolder* cichk = ic->cached_icholder();
+          f(cichk->holder_method());
+          f(cichk->holder_klass());
+        } else {
+          Metadata* ic_oop = ic->cached_metadata();
+          if (ic_oop != NULL) {
+            f(ic_oop);
+          }
+        }
       }
     }
   }
 
   // Visit the metadata section
   for (Metadata** p = metadata_begin(); p < metadata_end(); p++) {
     if (*p == Universe::non_oop_word() || *p == NULL) continue; // skip non-oops
     Metadata* md = *p;
     f(md);
   }
+
   // Call function Method*, not embedded in these other places.
   if (_method != NULL) f(_method);
 }
 
 
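Note on this hunk: metadata_do previously visited only metadata embedded directly in the instruction stream and the metadata section; the new branch also reports metadata reachable solely through inline caches at virtual call sites, either the {holder_method, holder_klass} pair kept in a CompiledICHolder or a directly cached Metadata*. A rough model of that dispatch, using invented stand-in types rather than the real CompiledIC/CompiledICHolder classes:

    #include <cstdio>

    // Invented stand-ins: an inline cache either caches an "IC holder"
    // (a {method, klass} pair used while the call site is unresolved) or a
    // single metadata pointer. Both kinds must be reported to the visitor so
    // no embedded metadata is missed.
    struct ToyMetadata { const char* kind; };

    struct ToyICHolder {
      ToyMetadata* holder_method;
      ToyMetadata* holder_klass;
    };

    struct ToyInlineCache {
      bool         is_icholder_call;
      ToyICHolder* icholder;  // valid when is_icholder_call is true
      ToyMetadata* cached;    // otherwise; may be NULL
    };

    typedef void (*MetadataVisitor)(ToyMetadata*);

    static void visit_ic_metadata(const ToyInlineCache& ic, MetadataVisitor f) {
      if (ic.is_icholder_call) {
        f(ic.icholder->holder_method);
        f(ic.icholder->holder_klass);
      } else if (ic.cached != NULL) {
        f(ic.cached);
      }
    }

    static void print_metadata(ToyMetadata* md) { printf("visited %s\n", md->kind); }

    int main() {
      ToyMetadata m = { "Method" };
      ToyMetadata k = { "Klass" };
      ToyICHolder holder = { &m, &k };
      ToyInlineCache ic = { true, &holder, NULL };
      visit_ic_metadata(ic, print_metadata);
      return 0;
    }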
@@ -2068,15 +2080,14 @@
 void nmethod::preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) {
 #ifndef SHARK
   if (!method()->is_native()) {
     SimpleScopeDesc ssd(this, fr.pc());
     Bytecode_invoke call(ssd.method(), ssd.bci());
-    // compiled invokedynamic call sites have an implicit receiver at
-    // resolution time, so make sure it gets GC'ed.
-    bool has_receiver = !call.is_invokestatic();
+    bool has_receiver = call.has_receiver();
+    bool has_appendix = call.has_appendix();
     Symbol* signature = call.signature();
-    fr.oops_compiled_arguments_do(signature, has_receiver, reg_map, f);
+    fr.oops_compiled_arguments_do(signature, has_receiver, has_appendix, reg_map, f);
   }
 #endif // !SHARK
 }
 
 
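Note on this hunk: the hand-rolled !call.is_invokestatic() receiver test is replaced by Bytecode_invoke's own has_receiver() query, and the new has_appendix() flag tells the frame walker about the extra appendix argument that JSR 292 call sites (invokedynamic/invokehandle) append, so its oop is visited as well. A small sketch of the slot accounting this enables; outgoing_oop_slots is an invented helper, not a HotSpot function:

    #include <cstdio>

    // When scanning the outgoing arguments of a compiled call we may have to
    // visit one slot for the receiver and one for the appendix, in addition
    // to the oop slots the method signature describes.
    static int outgoing_oop_slots(int signature_oop_slots,
                                  bool has_receiver,
                                  bool has_appendix) {
      int slots = signature_oop_slots;
      if (has_receiver) slots += 1;  // 'this' is not part of the signature
      if (has_appendix) slots += 1;  // appendix oop pushed by the JVM, also not in the signature
      return slots;
    }

    int main() {
      // e.g. (Ljava/lang/String;I)V at an invokehandle site: one oop from the
      // signature, plus receiver, plus appendix.
      printf("oop slots to visit: %d\n", outgoing_oop_slots(1, true, true));
      return 0;
    }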