comparison src/cpu/sparc/vm/templateInterpreter_sparc.cpp @ 728:85656c8fa13f

Merge
author twisti
date Wed, 22 Apr 2009 06:09:24 -0700
parents be93aad57795 6b2273dd6fa9
children 389049f3f393
@@ -85,12 +85,12 @@
     __ set((intptr_t)message, G4_scratch);
     __ call_VM(Oexception, CAST_FROM_FN_PTR(address, InterpreterRuntime::create_exception), G3_scratch, G4_scratch);
   }
   // throw exception
   assert(Interpreter::throw_exception_entry() != NULL, "generate it first");
-  Address thrower(G3_scratch, Interpreter::throw_exception_entry());
-  __ jump_to (thrower);
+  AddressLiteral thrower(Interpreter::throw_exception_entry());
+  __ jump_to(thrower, G3_scratch);
   __ delayed()->nop();
   return entry;
 }
 
 
 address TemplateInterpreterGenerator::generate_ClassCastException_handler() {
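
The change above is the pattern this whole changeset applies: the old Address constructor bundled a scratch register with a far literal at construction time, while the new AddressLiteral carries only the constant and lets each use site supply its own temp (jump_to(thrower, G3_scratch)). On SPARC a 32-bit literal is materialized as a sethi of the upper 22 bits plus a 10-bit low immediate, which is why later hunks call low10() on the literal. A standalone sketch of that split, using hypothetical helpers rather than the HotSpot API:

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    // Hypothetical illustration of the SPARC sethi/%lo split behind
    // AddressLiteral: sethi fills bits 31..10 of a register, and the low
    // 10 bits travel as the immediate of a following or/ld.
    static uint32_t hi22(uint32_t value)  { return value >> 10; }    // sethi payload
    static uint32_t low10(uint32_t value) { return value & 0x3ff; }  // 10-bit immediate

    int main() {
      uint32_t addr = 0x12345678;
      uint32_t reg = hi22(addr) << 10;        // sethi %hi(addr), %g3
      uint32_t effective = reg + low10(addr); // ld [%g3 + %lo(addr)], %g3
      assert(effective == addr);
      printf("hi22=0x%x low10=0x%x -> 0x%x\n",
             (unsigned)hi22(addr), (unsigned)low10(addr), (unsigned)effective);
      return 0;
    }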
@@ -185,12 +185,12 @@
 
 
   const Register cache = G3_scratch;
   const Register size  = G1_scratch;
   __ get_cache_and_index_at_bcp(cache, G1_scratch, 1);
-  __ ld_ptr(Address(cache, 0, in_bytes(constantPoolCacheOopDesc::base_offset()) +
-                    in_bytes(ConstantPoolCacheEntry::flags_offset())), size);
+  __ ld_ptr(cache, constantPoolCacheOopDesc::base_offset() +
+                   ConstantPoolCacheEntry::flags_offset(), size);
   __ and3(size, 0xFF, size);                   // argument size in words
   __ sll(size, Interpreter::logStackElementSize(), size); // each argument size in bytes
   __ add(Lesp, size, Lesp);                    // pop arguments
   __ dispatch_next(state, step);
 
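
This hunk pops a callee's arguments on the return path: the low byte of the constant-pool-cache flags word is the argument size in words, which is scaled to bytes and added to Lesp (the expression stack grows downward, so adding pops). A scalar model of that arithmetic; the field layout and slot size here are assumptions for illustration:

    #include <cassert>
    #include <cstdint>

    int main() {
      const int log_stack_element_size = 3;     // 8-byte stack slots (assumed)
      uint32_t flags = 0xdead0003;              // low byte: 3 argument words
      uint32_t words = flags & 0xFF;            // __ and3(size, 0xFF, size)
      uint32_t bytes = words << log_stack_element_size;  // __ sll(size, ..., size)
      uint64_t Lesp  = 0x1000;
      Lesp += bytes;                            // __ add(Lesp, size, Lesp): pop
      assert(Lesp == 0x1000 + 3 * 8);
      return 0;
    }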
@@ -200,13 +200,12 @@
 
 address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, int step) {
   address entry = __ pc();
   __ get_constant_pool_cache(LcpoolCache); // load LcpoolCache
   { Label L;
-    Address exception_addr (G2_thread, 0, in_bytes(Thread::pending_exception_offset()));
-
-    __ ld_ptr(exception_addr, Gtemp);
+    Address exception_addr(G2_thread, Thread::pending_exception_offset());
+    __ ld_ptr(exception_addr, Gtemp);  // Load pending exception.
     __ tst(Gtemp);
     __ brx(Assembler::equal, false, Assembler::pt, L);
     __ delayed()->nop();
     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_pending_exception));
     __ should_not_reach_here();
@@ -281,44 +280,43 @@
 //
 void InterpreterGenerator::generate_counter_incr(Label* overflow, Label* profile_method, Label* profile_method_continue) {
   // Update standard invocation counters
   __ increment_invocation_counter(O0, G3_scratch);
   if (ProfileInterpreter) { // %%% Merge this into methodDataOop
-    Address interpreter_invocation_counter(Lmethod, 0, in_bytes(methodOopDesc::interpreter_invocation_counter_offset()));
+    Address interpreter_invocation_counter(Lmethod, methodOopDesc::interpreter_invocation_counter_offset());
     __ ld(interpreter_invocation_counter, G3_scratch);
     __ inc(G3_scratch);
     __ st(G3_scratch, interpreter_invocation_counter);
   }
 
   if (ProfileInterpreter && profile_method != NULL) {
     // Test to see if we should create a method data oop
-    Address profile_limit(G3_scratch, (address)&InvocationCounter::InterpreterProfileLimit);
-    __ sethi(profile_limit);
-    __ ld(profile_limit, G3_scratch);
+    AddressLiteral profile_limit(&InvocationCounter::InterpreterProfileLimit);
+    __ sethi(profile_limit, G3_scratch);
+    __ ld(G3_scratch, profile_limit.low10(), G3_scratch);
     __ cmp(O0, G3_scratch);
     __ br(Assembler::lessUnsigned, false, Assembler::pn, *profile_method_continue);
     __ delayed()->nop();
 
     // if no method data exists, go to profile_method
     __ test_method_data_pointer(*profile_method);
   }
 
-  Address invocation_limit(G3_scratch, (address)&InvocationCounter::InterpreterInvocationLimit);
-  __ sethi(invocation_limit);
-  __ ld(invocation_limit, G3_scratch);
+  AddressLiteral invocation_limit(&InvocationCounter::InterpreterInvocationLimit);
+  __ sethi(invocation_limit, G3_scratch);
+  __ ld(G3_scratch, invocation_limit.low10(), G3_scratch);
   __ cmp(O0, G3_scratch);
   __ br(Assembler::greaterEqualUnsigned, false, Assembler::pn, *overflow);
   __ delayed()->nop();
 
 }
 
 // Allocate monitor and lock method (asm interpreter)
 // ebx - methodOop
 //
 void InterpreterGenerator::lock_method(void) {
-  const Address access_flags      (Lmethod, 0, in_bytes(methodOopDesc::access_flags_offset()));
-  __ ld(access_flags, O0);
+  __ ld(Lmethod, in_bytes(methodOopDesc::access_flags_offset()), O0);  // Load access flags.
 
 #ifdef ASSERT
   { Label ok;
     __ btst(JVM_ACC_SYNCHRONIZED, O0);
     __ br( Assembler::notZero, false, Assembler::pt, ok);
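
The two unsigned compares in generate_counter_incr implement the interpreter's compilation trigger: once the invocation count in O0 reaches InterpreterProfileLimit the interpreter ensures a methodDataOop exists, and at InterpreterInvocationLimit it branches to the overflow path that requests compilation. Restated in scalar form; the limit values below are placeholders, not the VM's tuned defaults:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t InterpreterProfileLimit    = 0x2000;  // placeholder value
      const uint32_t InterpreterInvocationLimit = 0x8000;  // placeholder value
      uint32_t count = 0x2400;                             // value held in O0

      if (!(count < InterpreterProfileLimit))   // br(lessUnsigned, ..., continue)
        printf("profile: make sure a methodDataOop exists\n");
      if (count >= InterpreterInvocationLimit)  // br(greaterEqualUnsigned, ..., overflow)
        printf("overflow: request compilation\n");
      return 0;
    }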
@@ -358,12 +356,11 @@
 
 void TemplateInterpreterGenerator::generate_stack_overflow_check(Register Rframe_size,
                                                                  Register Rscratch,
                                                                  Register Rscratch2) {
   const int page_size = os::vm_page_size();
-  Address saved_exception_pc(G2_thread, 0,
-                             in_bytes(JavaThread::saved_exception_pc_offset()));
+  Address saved_exception_pc(G2_thread, JavaThread::saved_exception_pc_offset());
   Label after_frame_check;
 
   assert_different_registers(Rframe_size, Rscratch, Rscratch2);
 
   __ set( page_size, Rscratch );
@@ -371,11 +368,11 @@
 
   __ br( Assembler::lessEqual, false, Assembler::pt, after_frame_check );
   __ delayed()->nop();
 
   // get the stack base, and in debug, verify it is non-zero
-  __ ld_ptr( G2_thread, in_bytes(Thread::stack_base_offset()), Rscratch );
+  __ ld_ptr( G2_thread, Thread::stack_base_offset(), Rscratch );
 #ifdef ASSERT
   Label base_not_zero;
   __ cmp( Rscratch, G0 );
   __ brx( Assembler::notEqual, false, Assembler::pn, base_not_zero );
   __ delayed()->nop();
@@ -383,11 +380,11 @@
   __ bind(base_not_zero);
 #endif
 
   // get the stack size, and in debug, verify it is non-zero
   assert( sizeof(size_t) == sizeof(intptr_t), "wrong load size" );
-  __ ld_ptr( G2_thread, in_bytes(Thread::stack_size_offset()), Rscratch2 );
+  __ ld_ptr( G2_thread, Thread::stack_size_offset(), Rscratch2 );
 #ifdef ASSERT
   Label size_not_zero;
   __ cmp( Rscratch2, G0 );
   __ brx( Assembler::notEqual, false, Assembler::pn, size_not_zero );
   __ delayed()->nop();
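
generate_stack_overflow_check takes a fast exit when the new frame fits in a single page, relying on the guard pages at the end of the stack to fault if the frame straddles the limit; only larger frames pay for loading the thread's stack base and size and comparing explicitly. A hedged restatement of that decision, with an invented guard-zone size:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint64_t page_size  = 8192;             // os::vm_page_size()
      const uint64_t stack_base = 0x7f0000800000;   // Thread::stack_base_offset() load
      const uint64_t stack_size = 512 * 1024;       // Thread::stack_size_offset() load
      const uint64_t guard_zone = 2 * page_size;    // assumed guard reservation
      uint64_t frame_size = 24 * 1024;
      uint64_t sp         = 0x7f00007c0000;

      if (frame_size <= page_size) {
        puts("small frame: the guard pages catch any overflow");
      } else {
        uint64_t limit = stack_base - stack_size + guard_zone;  // lowest usable address
        puts(sp - frame_size < limit ? "would overflow: throw StackOverflowError"
                                     : "fits: continue frame setup");
      }
      return 0;
    }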
@@ -458,13 +455,13 @@
 // is necessary.
 //
 // (gri - 2/25/2000)
 
 
-  const Address size_of_parameters(G5_method, 0, in_bytes(methodOopDesc::size_of_parameters_offset()));
-  const Address size_of_locals    (G5_method, 0, in_bytes(methodOopDesc::size_of_locals_offset()));
-  const Address max_stack         (G5_method, 0, in_bytes(methodOopDesc::max_stack_offset()));
+  const Address size_of_parameters(G5_method, methodOopDesc::size_of_parameters_offset());
+  const Address size_of_locals    (G5_method, methodOopDesc::size_of_locals_offset());
+  const Address max_stack         (G5_method, methodOopDesc::max_stack_offset());
   int rounded_vm_local_words = round_to( frame::interpreter_frame_vm_local_words, WordsPerLong );
 
   const int extra_space =
     rounded_vm_local_words +                   // frame local scratch space
     //6815692//methodOopDesc::extra_stack_words() +  // extra push slots for MH adapters
@@ -537,12 +534,12 @@
   // that all present references to Lbyte_code initialize the register
   // immediately before use
   if (native_call) {
     __ mov(G0, Lbcp);
   } else {
-    __ ld_ptr(Address(G5_method, 0, in_bytes(methodOopDesc::const_offset())), Lbcp );
-    __ add(Address(Lbcp, 0, in_bytes(constMethodOopDesc::codes_offset())), Lbcp );
+    __ ld_ptr(G5_method, methodOopDesc::const_offset(), Lbcp);
+    __ add(Lbcp, in_bytes(constMethodOopDesc::codes_offset()), Lbcp);
   }
   __ mov( G5_method, Lmethod);                 // set Lmethod
   __ get_constant_pool_cache( LcpoolCache );   // set LcpoolCache
   __ sub(FP, rounded_vm_local_words * BytesPerWord, Lmonitors ); // set Lmonitors
 #ifdef _LP64
@@ -576,12 +573,12 @@
   __ verify_oop(G5_method);
 
   // do nothing for empty methods (do not even increment invocation counter)
   if ( UseFastEmptyMethods) {
     // If we need a safepoint check, generate full interpreter entry.
-    Address sync_state(G3_scratch, SafepointSynchronize::address_of_state());
-    __ load_contents(sync_state, G3_scratch);
+    AddressLiteral sync_state(SafepointSynchronize::address_of_state());
+    __ set(sync_state, G3_scratch);
     __ cmp(G3_scratch, SafepointSynchronize::_not_synchronized);
     __ br(Assembler::notEqual, false, Assembler::pn, slow_path);
     __ delayed()->nop();
 
     // Code: _return
@@ -615,11 +612,11 @@
   // XXX: for compressed oops pointer loading and decoding doesn't fit in
   // delay slot and damages G1
   if ( UseFastAccessorMethods && !UseCompressedOops ) {
     // Check if we need to reach a safepoint and generate full interpreter
     // frame if so.
-    Address sync_state(G3_scratch, SafepointSynchronize::address_of_state());
+    AddressLiteral sync_state(SafepointSynchronize::address_of_state());
     __ load_contents(sync_state, G3_scratch);
     __ cmp(G3_scratch, SafepointSynchronize::_not_synchronized);
     __ br(Assembler::notEqual, false, Assembler::pn, slow_path);
     __ delayed()->nop();
 
@@ -630,38 +627,38 @@
     __ delayed()->nop();
 
 
     // read first instruction word and extract bytecode @ 1 and index @ 2
     // get first 4 bytes of the bytecodes (big endian!)
-    __ ld_ptr(Address(G5_method, 0, in_bytes(methodOopDesc::const_offset())), G1_scratch);
-    __ ld(Address(G1_scratch, 0, in_bytes(constMethodOopDesc::codes_offset())), G1_scratch);
+    __ ld_ptr(G5_method, methodOopDesc::const_offset(), G1_scratch);
+    __ ld(G1_scratch, constMethodOopDesc::codes_offset(), G1_scratch);
 
     // move index @ 2 far left then to the right most two bytes.
     __ sll(G1_scratch, 2*BitsPerByte, G1_scratch);
     __ srl(G1_scratch, 2*BitsPerByte - exact_log2(in_words(
                        ConstantPoolCacheEntry::size()) * BytesPerWord), G1_scratch);
 
     // get constant pool cache
-    __ ld_ptr(G5_method, in_bytes(methodOopDesc::constants_offset()), G3_scratch);
+    __ ld_ptr(G5_method, methodOopDesc::constants_offset(), G3_scratch);
     __ ld_ptr(G3_scratch, constantPoolOopDesc::cache_offset_in_bytes(), G3_scratch);
 
     // get specific constant pool cache entry
     __ add(G3_scratch, G1_scratch, G3_scratch);
 
     // Check the constant Pool cache entry to see if it has been resolved.
     // If not, need the slow path.
     ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
-    __ ld_ptr(G3_scratch, in_bytes(cp_base_offset + ConstantPoolCacheEntry::indices_offset()), G1_scratch);
+    __ ld_ptr(G3_scratch, cp_base_offset + ConstantPoolCacheEntry::indices_offset(), G1_scratch);
     __ srl(G1_scratch, 2*BitsPerByte, G1_scratch);
     __ and3(G1_scratch, 0xFF, G1_scratch);
     __ cmp(G1_scratch, Bytecodes::_getfield);
     __ br(Assembler::notEqual, false, Assembler::pn, slow_path);
     __ delayed()->nop();
 
     // Get the type and return field offset from the constant pool cache
-    __ ld_ptr(G3_scratch, in_bytes(cp_base_offset + ConstantPoolCacheEntry::flags_offset()), G1_scratch);
-    __ ld_ptr(G3_scratch, in_bytes(cp_base_offset + ConstantPoolCacheEntry::f2_offset()), G3_scratch);
+    __ ld_ptr(G3_scratch, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), G1_scratch);
+    __ ld_ptr(G3_scratch, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), G3_scratch);
 
     Label xreturn_path;
     // Need to differentiate between igetfield, agetfield, bgetfield etc.
     // because they are different sizes.
     // Get the type from the constant pool cache
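
The sll/srl pair above is doing two jobs at once: the left shift discards bytes 0-1 of the big-endian instruction word (aload_0 and getfield), and the right shift brings the two index bytes down pre-multiplied by the constant-pool-cache entry size, so the result adds directly onto the cache base. A scalar model, assuming a 32-byte entry (4 words of 8 bytes):

    #include <cassert>
    #include <cstdint>

    int main() {
      const int BitsPerByte = 8;
      const int entry_size  = 32;  // in_words(ConstantPoolCacheEntry::size()) * BytesPerWord (assumed)
      const int log2_entry  = 5;   // exact_log2(32)

      // First four bytecode bytes, big endian: aload_0, getfield, index hi, index lo.
      uint32_t word   = 0x2AB40007;                          // cache index 7 in bytes 2..3
      uint32_t x      = word << (2 * BitsPerByte);           // sll: keep only the index bytes
      uint32_t scaled = x >> (2 * BitsPerByte - log2_entry); // srl with the scaling folded in
      assert(scaled == 7u * entry_size);                     // ready for the add
      return 0;
    }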
@@ -716,21 +713,21 @@
   bool inc_counter  = UseCompiler || CountCompiledCalls;
 
   // make sure registers are different!
   assert_different_registers(G2_thread, G5_method, Gargs, Gtmp1, Gtmp2);
 
-  const Address Laccess_flags     (Lmethod, 0, in_bytes(methodOopDesc::access_flags_offset()));
+  const Address Laccess_flags(Lmethod, methodOopDesc::access_flags_offset());
 
   __ verify_oop(G5_method);
 
   const Register Glocals_size = G3;
   assert_different_registers(Glocals_size, G4_scratch, Gframe_size);
 
   // make sure method is native & not abstract
   // rethink these assertions - they can be simplified and shared (gri 2/25/2000)
 #ifdef ASSERT
-  __ ld(G5_method, in_bytes(methodOopDesc::access_flags_offset()), Gtmp1);
+  __ ld(G5_method, methodOopDesc::access_flags_offset(), Gtmp1);
   {
     Label L;
     __ btst(JVM_ACC_NATIVE, Gtmp1);
     __ br(Assembler::notZero, false, Assembler::pt, L);
     __ delayed()->nop();
@@ -753,14 +750,14 @@
   // No locals to initialize for native method
   //
 
   // this slot will be set later, we initialize it to null here just in
   // case we get a GC before the actual value is stored later
-  __ st_ptr(G0, Address(FP, 0, (frame::interpreter_frame_oop_temp_offset*wordSize) + STACK_BIAS));
+  __ st_ptr(G0, FP, (frame::interpreter_frame_oop_temp_offset * wordSize) + STACK_BIAS);
 
-  const Address do_not_unlock_if_synchronized(G2_thread, 0,
-      in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
+  const Address do_not_unlock_if_synchronized(G2_thread,
+      JavaThread::do_not_unlock_if_synchronized_offset());
   // Since at this point in the method invocation the exception handler
   // would try to exit the monitor of synchronized methods which hasn't
   // been entered yet, we set the thread local variable
   // _do_not_unlock_if_synchronized to true. If any exception was thrown by
   // runtime, exception handling i.e. unlock_if_synchronized_method will
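
The long comment describes a small per-thread protocol rather than code structure: the flag is raised across the window where an exception could unwind a synchronized method whose monitor has not been entered yet, so unlock_if_synchronized_method knows to skip the unlock. A toy sketch of that idea with invented types, not the HotSpot ones:

    #include <cstdio>

    // Invented stand-in; HotSpot keeps this flag in JavaThread at
    // do_not_unlock_if_synchronized_offset() and sets it with a plain store.
    struct ToyThread { bool do_not_unlock_if_synchronized = false; };

    static void exception_unwind(ToyThread& t) {
      // unlock_if_synchronized_method: skip the unlock when the flag is set,
      // because the monitor was never entered.
      std::printf(t.do_not_unlock_if_synchronized ? "skip unlock\n"
                                                  : "unlock monitor\n");
    }

    int main() {
      ToyThread t;
      t.do_not_unlock_if_synchronized = true;   // raised before monitor entry
      exception_unwind(t);                      // runtime throws in that window
      t.do_not_unlock_if_synchronized = false;  // cleared once the lock is held
      return 0;
    }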
@@ -823,16 +820,17 @@
 
   // (note that the space for outgoing params is preallocated)
 
   // get signature handler
   { Label L;
-    __ ld_ptr(Address(Lmethod, 0, in_bytes(methodOopDesc::signature_handler_offset())), G3_scratch);
+    Address signature_handler(Lmethod, methodOopDesc::signature_handler_offset());
+    __ ld_ptr(signature_handler, G3_scratch);
     __ tst(G3_scratch);
     __ brx(Assembler::notZero, false, Assembler::pt, L);
     __ delayed()->nop();
     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), Lmethod);
-    __ ld_ptr(Address(Lmethod, 0, in_bytes(methodOopDesc::signature_handler_offset())), G3_scratch);
+    __ ld_ptr(signature_handler, G3_scratch);
     __ bind(L);
   }
 
   // Push a new frame so that the args will really be stored in
   // Copy a few locals across so the new frame has the variables
@@ -841,14 +839,13 @@
   // frame (Lmethod in particular)
 
   // Flush the method pointer to the register save area
   __ st_ptr(Lmethod, SP, (Lmethod->sp_offset_in_saved_window() * wordSize) + STACK_BIAS);
   __ mov(Llocals, O1);
-  // calculate where the mirror handle body is allocated in the interpreter frame:
 
-  Address mirror(FP, 0, (frame::interpreter_frame_oop_temp_offset*wordSize) + STACK_BIAS);
-  __ add(mirror, O2);
+  // calculate where the mirror handle body is allocated in the interpreter frame:
+  __ add(FP, (frame::interpreter_frame_oop_temp_offset * wordSize) + STACK_BIAS, O2);
 
   // Calculate current frame size
   __ sub(SP, FP, O3);         // Calculate negative of current frame size
   __ save(SP, O3, SP);        // Allocate an identical sized frame
 
@@ -881,18 +878,17 @@
   { Label not_static;
 
     __ ld(Laccess_flags, O0);
     __ btst(JVM_ACC_STATIC, O0);
     __ br( Assembler::zero, false, Assembler::pt, not_static);
-    __ delayed()->
-      // get native function entry point(O0 is a good temp until the very end)
-      ld_ptr(Address(Lmethod, 0, in_bytes(methodOopDesc::native_function_offset())), O0);
+    // get native function entry point(O0 is a good temp until the very end)
+    __ delayed()->ld_ptr(Lmethod, in_bytes(methodOopDesc::native_function_offset()), O0);
     // for static methods insert the mirror argument
     const int mirror_offset = klassOopDesc::klass_part_offset_in_bytes() + Klass::java_mirror_offset_in_bytes();
 
-    __ ld_ptr(Address(Lmethod, 0, in_bytes(methodOopDesc:: constants_offset())), O1);
-    __ ld_ptr(Address(O1, 0, constantPoolOopDesc::pool_holder_offset_in_bytes()), O1);
+    __ ld_ptr(Lmethod, methodOopDesc:: constants_offset(), O1);
+    __ ld_ptr(O1, constantPoolOopDesc::pool_holder_offset_in_bytes(), O1);
     __ ld_ptr(O1, mirror_offset, O1);
 #ifdef ASSERT
     if (!PrintSignatureHandlers)  // do not dirty the output with this
     { Label L;
       __ tst(O1);
@@ -943,19 +939,17 @@
   // only the outer frames
 
   __ flush_windows();
 
   // mark windows as flushed
-  Address flags(G2_thread,
-                0,
-                in_bytes(JavaThread::frame_anchor_offset()) + in_bytes(JavaFrameAnchor::flags_offset()));
+  Address flags(G2_thread, JavaThread::frame_anchor_offset() + JavaFrameAnchor::flags_offset());
   __ set(JavaFrameAnchor::flushed, G3_scratch);
   __ st(G3_scratch, flags);
 
   // Transition from _thread_in_Java to _thread_in_native. We are already safepoint ready.
 
-  Address thread_state(G2_thread, 0, in_bytes(JavaThread::thread_state_offset()));
+  Address thread_state(G2_thread, JavaThread::thread_state_offset());
 #ifdef ASSERT
   { Label L;
     __ ld(thread_state, G3_scratch);
     __ cmp(G3_scratch, _thread_in_Java);
     __ br(Assembler::equal, false, Assembler::pt, L);
@@ -981,11 +975,11 @@
   // must we block?
 
   // Block, if necessary, before resuming in _thread_in_Java state.
   // In order for GC to work, don't clear the last_Java_sp until after blocking.
   { Label no_block;
-    Address sync_state(G3_scratch, SafepointSynchronize::address_of_state());
+    AddressLiteral sync_state(SafepointSynchronize::address_of_state());
 
     // Switch thread to "native transition" state before reading the synchronization state.
     // This additional state is necessary because reading and testing the synchronization
     // state is not atomic w.r.t. GC, as this scenario demonstrates:
     // Java thread A, in _thread_in_native state, loads _not_synchronized and is preempted.
@@ -1008,14 +1002,12 @@
     }
     __ load_contents(sync_state, G3_scratch);
     __ cmp(G3_scratch, SafepointSynchronize::_not_synchronized);
 
     Label L;
-    Address suspend_state(G2_thread, 0, in_bytes(JavaThread::suspend_flags_offset()));
     __ br(Assembler::notEqual, false, Assembler::pn, L);
-    __ delayed()->
-      ld(suspend_state, G3_scratch);
+    __ delayed()->ld(G2_thread, JavaThread::suspend_flags_offset(), G3_scratch);
     __ cmp(G3_scratch, 0);
     __ br(Assembler::equal, false, Assembler::pt, no_block);
     __ delayed()->nop();
     __ bind(L);
 
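
This block is the native-return handshake the earlier comment motivates: publish _thread_in_native_trans first, and only then read the global safepoint word and the per-thread suspend flags, so the VM thread can never miss a thread sneaking back into Java. A sketch of the ordering with C++ atomics; the enum values and helper are stand-ins, not HotSpot's declarations:

    #include <atomic>

    enum ThreadState { _thread_in_native, _thread_in_native_trans, _thread_in_Java };
    enum SyncState   { _not_synchronized, _synchronizing, _synchronized };

    std::atomic<ThreadState> thread_state{_thread_in_native};
    std::atomic<SyncState>   safepoint_state{_not_synchronized};
    std::atomic<int>         suspend_flags{0};

    // Stand-in for JavaThread::check_safepoint_and_suspend_for_native_trans.
    static void block_for_safepoint() { /* wait out the safepoint/suspension */ }

    int main() {
      // 1. Publish the transition state so the VM thread sees us (seq_cst acts
      //    as the StoreLoad barrier the real code needs here).
      thread_state.store(_thread_in_native_trans, std::memory_order_seq_cst);
      // 2. Only afterwards read the global and per-thread conditions.
      if (safepoint_state.load() != _not_synchronized || suspend_flags.load() != 0)
        block_for_safepoint();      // must block before resuming Java execution
      // 3. Now it is safe to resume.
      thread_state.store(_thread_in_Java, std::memory_order_release);
      return 0;
    }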
@@ -1053,11 +1045,11 @@
 
   __ set(_thread_in_Java, G3_scratch);
   __ st(G3_scratch, thread_state);
 
   // reset handle block
-  __ ld_ptr(G2_thread, in_bytes(JavaThread::active_handles_offset()), G3_scratch);
+  __ ld_ptr(G2_thread, JavaThread::active_handles_offset(), G3_scratch);
   __ st_ptr(G0, G3_scratch, JNIHandleBlock::top_offset_in_bytes());
 
   // If we have an oop result store it where it will be safe for any further gc
   // until we return now that we've released the handle it might be protected by
 
@@ -1082,12 +1074,11 @@
   }
 
 
   // handle exceptions (exception handling will handle unlocking!)
   { Label L;
-    Address exception_addr (G2_thread, 0, in_bytes(Thread::pending_exception_offset()));
-
+    Address exception_addr(G2_thread, Thread::pending_exception_offset());
     __ ld_ptr(exception_addr, Gtemp);
     __ tst(Gtemp);
     __ brx(Assembler::equal, false, Assembler::pt, L);
     __ delayed()->nop();
     // Note: This could be handled more efficiently since we know that the native
@@ -1169,25 +1160,25 @@
   const Register Gtmp2 = G1_scratch;
 
   // make sure registers are different!
   assert_different_registers(G2_thread, G5_method, Gargs, Gtmp1, Gtmp2);
 
-  const Address size_of_parameters(G5_method, 0, in_bytes(methodOopDesc::size_of_parameters_offset()));
-  const Address size_of_locals    (G5_method, 0, in_bytes(methodOopDesc::size_of_locals_offset()));
+  const Address size_of_parameters(G5_method, methodOopDesc::size_of_parameters_offset());
+  const Address size_of_locals    (G5_method, methodOopDesc::size_of_locals_offset());
   // Seems like G5_method is live at the point this is used. So we could make this look consistent
   // and use in the asserts.
-  const Address access_flags      (Lmethod,   0, in_bytes(methodOopDesc::access_flags_offset()));
+  const Address access_flags      (Lmethod,   methodOopDesc::access_flags_offset());
 
   __ verify_oop(G5_method);
 
   const Register Glocals_size = G3;
   assert_different_registers(Glocals_size, G4_scratch, Gframe_size);
 
   // make sure method is not native & not abstract
   // rethink these assertions - they can be simplified and shared (gri 2/25/2000)
 #ifdef ASSERT
-  __ ld(G5_method, in_bytes(methodOopDesc::access_flags_offset()), Gtmp1);
+  __ ld(G5_method, methodOopDesc::access_flags_offset(), Gtmp1);
   {
     Label L;
     __ btst(JVM_ACC_NATIVE, Gtmp1);
     __ br(Assembler::zero, false, Assembler::pt, L);
     __ delayed()->nop();
@@ -1238,12 +1229,12 @@
 
   __ cmp( O2, O1 );
   __ brx( Assembler::lessEqualUnsigned, true, Assembler::pt, clear_loop );
   __ delayed()->st_ptr( init_value, O2, 0 );
 
-  const Address do_not_unlock_if_synchronized(G2_thread, 0,
-      in_bytes(JavaThread::do_not_unlock_if_synchronized_offset()));
+  const Address do_not_unlock_if_synchronized(G2_thread,
+      JavaThread::do_not_unlock_if_synchronized_offset());
   // Since at this point in the method invocation the exception handler
   // would try to exit the monitor of synchronized methods which hasn't
   // been entered yet, we set the thread local variable
   // _do_not_unlock_if_synchronized to true. If any exception was thrown by
   // runtime, exception handling i.e. unlock_if_synchronized_method will
@@ -1715,11 +1706,11 @@
   //
   // JVMTI PopFrame support
   //
 
   Interpreter::_remove_activation_preserving_args_entry = __ pc();
-  Address popframe_condition_addr (G2_thread, 0, in_bytes(JavaThread::popframe_condition_offset()));
+  Address popframe_condition_addr(G2_thread, JavaThread::popframe_condition_offset());
   // Set the popframe_processing bit in popframe_condition indicating that we are
   // currently handling popframe, so that call_VMs that may happen later do not trigger new
   // popframe handling cycles.
 
   __ ld(popframe_condition_addr, G3_scratch);
@@ -1757,11 +1748,11 @@
   __ add(Gtmp2, wordSize, Gtmp2);
   // Save these arguments
   __ call_VM_leaf(L7_thread_cache, CAST_FROM_FN_PTR(address, Deoptimization::popframe_preserve_args), G2_thread, Gtmp1, Gtmp2);
   // Inform deoptimization that it is responsible for restoring these arguments
   __ set(JavaThread::popframe_force_deopt_reexecution_bit, Gtmp1);
-  Address popframe_condition_addr(G2_thread, 0, in_bytes(JavaThread::popframe_condition_offset()));
+  Address popframe_condition_addr(G2_thread, JavaThread::popframe_condition_offset());
   __ st(Gtmp1, popframe_condition_addr);
 
   // Return from the current method
   // The caller's SP was adjusted upon method entry to accomodate
   // the callee's non-argument locals. Undo that adjustment.
@@ -1806,11 +1797,11 @@
 
   __ get_vm_result(Oexception);
   __ verify_oop(Oexception);
 
   const int return_reg_adjustment = frame::pc_return_offset;
-  Address issuing_pc_addr(I7, 0, return_reg_adjustment);
+  Address issuing_pc_addr(I7, return_reg_adjustment);
 
   // We are done with this activation frame; find out where to go next.
   // The continuation point will be an exception handler, which expects
   // the following registers set up:
   //
@@ -1852,12 +1843,12 @@
   address entry = __ pc();
 
   __ empty_expression_stack();
   __ load_earlyret_value(state);
 
-  __ ld_ptr(Address(G2_thread, 0, in_bytes(JavaThread::jvmti_thread_state_offset())), G3_scratch);
-  Address cond_addr(G3_scratch, 0, in_bytes(JvmtiThreadState::earlyret_state_offset()));
+  __ ld_ptr(G2_thread, JavaThread::jvmti_thread_state_offset(), G3_scratch);
+  Address cond_addr(G3_scratch, JvmtiThreadState::earlyret_state_offset());
 
   // Clear the earlyret state
   __ stw(G0 /* JvmtiThreadState::earlyret_inactive */, cond_addr);
 
   __ remove_activation(state,
@@ -1920,47 +1911,37 @@
 
 
 // helpers for generate_and_dispatch
 
 void TemplateInterpreterGenerator::count_bytecode() {
-  Address c(G3_scratch, (address)&BytecodeCounter::_counter_value);
-  __ load_contents(c, G4_scratch);
-  __ inc(G4_scratch);
-  __ st(G4_scratch, c);
+  __ inc_counter(&BytecodeCounter::_counter_value, G3_scratch, G4_scratch);
 }
 
 
 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
-  Address bucket( G3_scratch, (address) &BytecodeHistogram::_counters[t->bytecode()] );
-  __ load_contents(bucket, G4_scratch);
-  __ inc(G4_scratch);
-  __ st(G4_scratch, bucket);
+  __ inc_counter(&BytecodeHistogram::_counters[t->bytecode()], G3_scratch, G4_scratch);
 }
 
 
 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
-  address index_addr = (address)&BytecodePairHistogram::_index;
-  Address index(G3_scratch, index_addr);
-
-  address counters_addr = (address)&BytecodePairHistogram::_counters;
-  Address counters(G3_scratch, counters_addr);
+  AddressLiteral index   (&BytecodePairHistogram::_index);
+  AddressLiteral counters((address) &BytecodePairHistogram::_counters);
 
   // get index, shift out old bytecode, bring in new bytecode, and store it
   // _index = (_index >> log2_number_of_codes) |
   //          (bytecode << log2_number_of_codes);
 
-
-  __ load_contents( index, G4_scratch );
+  __ load_contents(index, G4_scratch);
   __ srl( G4_scratch, BytecodePairHistogram::log2_number_of_codes, G4_scratch );
   __ set( ((int)t->bytecode()) << BytecodePairHistogram::log2_number_of_codes, G3_scratch );
   __ or3( G3_scratch, G4_scratch, G4_scratch );
-  __ store_contents( G4_scratch, index );
+  __ store_contents(G4_scratch, index, G3_scratch);
 
   // bump bucket contents
   // _counters[_index] ++;
 
-  __ load_address( counters );  // loads into G3_scratch
+  __ set(counters, G3_scratch);  // loads into G3_scratch
   __ sll( G4_scratch, LogBytesPerWord, G4_scratch );  // Index is word address
   __ add (G3_scratch, G4_scratch, G3_scratch);        // Add in index
   __ ld (G3_scratch, 0, G4_scratch);
   __ inc (G4_scratch);
   __ st (G4_scratch, 0, G3_scratch);
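
histogram_bytecode_pair keeps a rolling index over the last two bytecodes: the shift right discards the older bytecode while the OR installs the new one in the high position, and the result indexes a flat counter table. A scalar model, assuming log2_number_of_codes is 8 (one byte per bytecode number):

    #include <cassert>
    #include <cstdint>

    int main() {
      const int log2_number_of_codes = 8;               // assumed
      const int number_of_codes = 1 << log2_number_of_codes;
      static uint32_t counters[number_of_codes * number_of_codes];
      uint32_t index = 0;

      auto record = [&](uint8_t bytecode) {
        // _index = (_index >> log2_number_of_codes) | (bytecode << log2_number_of_codes);
        index = (index >> log2_number_of_codes) |
                ((uint32_t)bytecode << log2_number_of_codes);
        counters[index]++;                              // _counters[_index]++
      };

      record(0x2A);                                     // aload_0
      record(0xB4);                                     // getfield
      assert(index == (0xB4u << 8 | 0x2Au));            // high byte: current bytecode
      assert(counters[index] == 1);
      return 0;
    }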
@@ -1977,13 +1958,13 @@
   __ delayed()->nop();
 }
 
 
 void TemplateInterpreterGenerator::stop_interpreter_at() {
-  Address counter(G3_scratch , (address)&BytecodeCounter::_counter_value);
-  __ load_contents (counter, G3_scratch );
-  Address stop_at(G4_scratch, (address)&StopInterpreterAt);
+  AddressLiteral counter(&BytecodeCounter::_counter_value);
+  __ load_contents(counter, G3_scratch);
+  AddressLiteral stop_at(&StopInterpreterAt);
   __ load_ptr_contents(stop_at, G4_scratch);
   __ cmp(G3_scratch, G4_scratch);
   __ breakpoint_trap(Assembler::equal);
 }
 #endif // not PRODUCT