comparison src/share/vm/code/nmethod.cpp @ 10114:a7fb14888912

8006952: Slow VM due to excessive code cache freelist iteration
Summary: Remove continuous free block requirement
Reviewed-by: kvn
author neliasso
date Thu, 11 Apr 2013 13:57:44 +0200
parents 3c786355ffb4
children 4674e409a9e6
comparison of 10113:4b2eebe03f93 with 10114:a7fb14888912
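
Background for the hunks below: all three nmethod-creation paths currently guard allocation with a CodeCache::has_space() pre-check which, per the bug title and the summary above, has to prove that a large enough continuous free block exists, which suggests a walk over the code-cache freelist on every compile. A minimal sketch of what such a pre-check costs, assuming a singly linked freelist; FreeBlock and has_contiguous_space are illustrative stand-ins, not the HotSpot API:

#include <cstddef>
#include <cstdio>

struct FreeBlock {
  size_t     size;
  FreeBlock* next;
};

// O(freelist length) on every allocation request: repeated for each compiled
// method, this walk is the per-allocation cost the changeset removes by
// attempting the allocation directly and letting it return NULL on failure.
static bool has_contiguous_space(const FreeBlock* freelist, size_t needed) {
  for (const FreeBlock* b = freelist; b != NULL; b = b->next) {
    if (b->size >= needed) return true;
  }
  return false;
}

int main() {
  FreeBlock c = { 64,  NULL };
  FreeBlock b = { 128, &c   };
  FreeBlock a = { 32,  &b   };
  std::printf("fits: %d\n", has_contiguous_space(&a, 100));  // prints 1
  return 0;
}

The change below drops this per-allocation pre-check and instead lets the allocation itself report failure.
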
@@ -499,22 +499,21 @@
   // create nmethod
   nmethod* nm = NULL;
   {
     MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
     int native_nmethod_size = allocation_size(code_buffer, sizeof(nmethod));
-    if (CodeCache::has_space(native_nmethod_size)) {
-      CodeOffsets offsets;
-      offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
-      offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
-      nm = new (native_nmethod_size) nmethod(method(), native_nmethod_size,
-                                             compile_id, &offsets,
-                                             code_buffer, frame_size,
-                                             basic_lock_owner_sp_offset,
-                                             basic_lock_sp_offset, oop_maps);
-      NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_native_nmethod(nm));
-      if (PrintAssembly && nm != NULL)
-        Disassembler::decode(nm);
-    }
+    CodeOffsets offsets;
+    offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
+    offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
+    nm = new (native_nmethod_size) nmethod(method(), native_nmethod_size,
+                                           compile_id, &offsets,
+                                           code_buffer, frame_size,
+                                           basic_lock_owner_sp_offset,
+                                           basic_lock_sp_offset, oop_maps);
+    NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_native_nmethod(nm));
+    if (PrintAssembly && nm != NULL) {
+      Disassembler::decode(nm);
+    }
   }
   // verify nmethod
   debug_only(if (nm) nm->verify();) // might block
 
@@ -536,22 +535,21 @@
   // create nmethod
   nmethod* nm = NULL;
   {
     MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
     int nmethod_size = allocation_size(code_buffer, sizeof(nmethod));
-    if (CodeCache::has_space(nmethod_size)) {
-      CodeOffsets offsets;
-      offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
-      offsets.set_value(CodeOffsets::Dtrace_trap, trap_offset);
-      offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
-
-      nm = new (nmethod_size) nmethod(method(), nmethod_size,
-                                      &offsets, code_buffer, frame_size);
-
-      NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_nmethod(nm));
-      if (PrintAssembly && nm != NULL)
-        Disassembler::decode(nm);
-    }
+    CodeOffsets offsets;
+    offsets.set_value(CodeOffsets::Verified_Entry, vep_offset);
+    offsets.set_value(CodeOffsets::Dtrace_trap, trap_offset);
+    offsets.set_value(CodeOffsets::Frame_Complete, frame_complete);
+
+    nm = new (nmethod_size) nmethod(method(), nmethod_size,
+                                    &offsets, code_buffer, frame_size);
+
+    NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_nmethod(nm));
+    if (PrintAssembly && nm != NULL) {
+      Disassembler::decode(nm);
+    }
   }
   // verify nmethod
   debug_only(if (nm) nm->verify();) // might block
 
@@ -589,40 +587,43 @@
       + adjust_pcs_size(debug_info->pcs_size())
       + round_to(dependencies->size_in_bytes() , oopSize)
       + round_to(handler_table->size_in_bytes(), oopSize)
       + round_to(nul_chk_table->size_in_bytes(), oopSize)
       + round_to(debug_info->data_size() , oopSize);
-    if (CodeCache::has_space(nmethod_size)) {
-      nm = new (nmethod_size)
-      nmethod(method(), nmethod_size, compile_id, entry_bci, offsets,
-              orig_pc_offset, debug_info, dependencies, code_buffer, frame_size,
-              oop_maps,
-              handler_table,
-              nul_chk_table,
-              compiler,
-              comp_level);
-    }
+
+    nm = new (nmethod_size)
+    nmethod(method(), nmethod_size, compile_id, entry_bci, offsets,
+            orig_pc_offset, debug_info, dependencies, code_buffer, frame_size,
+            oop_maps,
+            handler_table,
+            nul_chk_table,
+            compiler,
+            comp_level);
+
     if (nm != NULL) {
       // To make dependency checking during class loading fast, record
       // the nmethod dependencies in the classes it is dependent on.
       // This allows the dependency checking code to simply walk the
       // class hierarchy above the loaded class, checking only nmethods
       // which are dependent on those classes.  The slow way is to
       // check every nmethod for dependencies which makes it linear in
       // the number of methods compiled.  For applications with a lot
       // classes the slow way is too slow.
       for (Dependencies::DepStream deps(nm); deps.next(); ) {
         Klass* klass = deps.context_type();
-        if (klass == NULL) continue; // ignore things like evol_method
+        if (klass == NULL) {
+          continue; // ignore things like evol_method
+        }
 
         // record this nmethod as dependent on this klass
         InstanceKlass::cast(klass)->add_dependent_nmethod(nm);
       }
     }
     NOT_PRODUCT(if (nm != NULL) nmethod_stats.note_nmethod(nm));
-    if (PrintAssembly && nm != NULL)
+    if (PrintAssembly && nm != NULL) {
       Disassembler::decode(nm);
+    }
   }
 
   // verify nmethod
   debug_only(if (nm) nm->verify();) // might block
 
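
Aside on the dependency-recording loop in the hunk above (unchanged by this fix apart from bracing): each new nmethod registers itself on the classes it depends on, so class loading only has to re-check the methods recorded on the affected classes rather than every compiled method in the VM. A rough stand-alone sketch of that idea; KlassRecord and CompiledMethod are hypothetical stand-ins for InstanceKlass and nmethod, not the HotSpot types:

#include <cstdio>
#include <vector>

struct CompiledMethod {
  const char* name;
};

struct KlassRecord {
  const char*                  name;
  std::vector<CompiledMethod*> dependents;   // ~ add_dependent_nmethod(nm)

  explicit KlassRecord(const char* n) : name(n) {}

  void add_dependent(CompiledMethod* m) { dependents.push_back(m); }

  // When this class hierarchy changes (e.g. a subtype is loaded), only the
  // registered dependents are re-checked, instead of every compiled method
  // in the VM (the "slow way" the comment above warns about).
  void check_dependents() const {
    for (size_t i = 0; i < dependents.size(); ++i) {
      std::printf("re-check %s against change in %s\n",
                  dependents[i]->name, name);
    }
  }
};

int main() {
  CompiledMethod m = { "Foo::bar (compiled)" };
  KlassRecord    k("Foo");
  k.add_dependent(&m);   // analogous to the DepStream loop above
  k.check_dependents();
  return 0;
}
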
@@ -796,16 +797,14 @@
     }
   }
 }
 #endif // def HAVE_DTRACE_H
 
-void* nmethod::operator new(size_t size, int nmethod_size) {
-  void* alloc = CodeCache::allocate(nmethod_size);
-  guarantee(alloc != NULL, "CodeCache should have enough space");
-  return alloc;
-}
-
+void* nmethod::operator new(size_t size, int nmethod_size) throw () {
+  // Not critical, may return null if there is too little continuous memory
+  return CodeCache::allocate(nmethod_size);
+}
 
 nmethod::nmethod(
   Method* method,
   int nmethod_size,
   int compile_id,
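
A note on the operator new hunk above: the declaration now carries an empty exception specification (throw ()), and in C++ a NULL return from a non-throwing allocation function makes the whole new-expression evaluate to NULL and skips the constructor. That is what lets the creation paths above rely on their existing "if (nm != NULL)" guards instead of the removed guarantee(). A minimal stand-alone sketch of that behaviour; cache_allocate and Blob are illustrative stand-ins, not HotSpot code:

#include <cstddef>
#include <cstdio>

// Stand-in for CodeCache::allocate(); always fails here to show the path.
static void* cache_allocate(size_t size) {
  (void)size;
  return NULL;   // "too little continuous memory"
}

struct Blob {
  // Non-throwing placement-style allocation, shaped like nmethod::operator
  // new above: declared throw(), so returning NULL is a legal way to report
  // failure.
  void* operator new(size_t, int blob_size) throw () {
    return cache_allocate((size_t) blob_size);
  }
  void operator delete(void*, int) {}   // matching placement delete
  Blob() { std::printf("constructed\n"); }
};

int main() {
  Blob* b = new (128) Blob();   // evaluates to NULL, constructor not run
  if (b == NULL) {
    std::printf("allocation failed, caller bails out instead of asserting\n");
  }
  return 0;
}

Without the throw () specification the compiler could assume the allocation function reports failure by throwing, so it would emit no null check and would run the constructor on whatever pointer came back.
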