comparison src/cpu/sparc/vm/templateInterpreter_sparc.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 1d7922586cf6
children b2dbd323c668 f0c2369fda5a
comparing 6724:36d1d483d5d6 with 6725:da91efe96a93
@@ -28,12 +28,12 @@
 #include "interpreter/interpreter.hpp"
 #include "interpreter/interpreterGenerator.hpp"
 #include "interpreter/interpreterRuntime.hpp"
 #include "interpreter/templateTable.hpp"
 #include "oops/arrayOop.hpp"
-#include "oops/methodDataOop.hpp"
-#include "oops/methodOop.hpp"
+#include "oops/methodData.hpp"
+#include "oops/method.hpp"
 #include "oops/oop.inline.hpp"
 #include "prims/jvmtiExport.hpp"
 #include "prims/jvmtiThreadState.hpp"
 #include "runtime/arguments.hpp"
 #include "runtime/deoptimization.hpp"
@@ -192,11 +192,11 @@
     __ ldub(Address(Lbcp, 0), G1_scratch);  // Load current bytecode.
     __ cmp_and_br_short(G1_scratch, Bytecodes::_invokedynamic, Assembler::equal, Assembler::pn, L_giant_index);
   }
   __ get_cache_and_index_at_bcp(cache, G1_scratch, 1);
   __ bind(L_got_cache);
-  __ ld_ptr(cache, constantPoolCacheOopDesc::base_offset() +
+  __ ld_ptr(cache, ConstantPoolCache::base_offset() +
                    ConstantPoolCacheEntry::flags_offset(), size);
   __ and3(size, 0xFF, size);                            // argument size in words
   __ sll(size, Interpreter::logStackElementSize, size); // each argument size in bytes
   __ add(Lesp, size, Lesp);                             // pop arguments
   __ dispatch_next(state, step);
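Note on the hunk above: the and3/sll/add triple decodes the parameter count from the cache-entry flags and pops that many stack slots. A minimal C++ sketch of the arithmetic (the helper name is hypothetical; logStackElementSize == 3 assumes 8-byte stack slots on a 64-bit SPARC build):

    // Hypothetical restatement of the emitted and3/sll/add sequence.
    static inline intptr_t* pop_arguments(intptr_t* esp, uint32_t entry_flags) {
      uint32_t words = entry_flags & 0xFF;      // __ and3(size, 0xFF, size): argument size in words
      size_t   bytes = (size_t)words << 3;      // __ sll(size, logStackElementSize, size), assuming log == 3
      return (intptr_t*)((char*)esp + bytes);   // __ add(Lesp, size, Lesp): pop arguments
    }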
@@ -289,43 +289,43 @@
 //
 // Lmethod: method
 //   ??: invocation counter
 //
 void InterpreterGenerator::generate_counter_incr(Label* overflow, Label* profile_method, Label* profile_method_continue) {
-  // Note: In tiered we increment either counters in methodOop or in MDO depending if we're profiling or not.
+  // Note: In tiered we increment either counters in Method* or in MDO depending if we're profiling or not.
   if (TieredCompilation) {
     const int increment = InvocationCounter::count_increment;
     const int mask = ((1 << Tier0InvokeNotifyFreqLog) - 1) << InvocationCounter::count_shift;
     Label no_mdo, done;
     if (ProfileInterpreter) {
       // If no method data exists, go to profile_continue.
-      __ ld_ptr(Lmethod, methodOopDesc::method_data_offset(), G4_scratch);
+      __ ld_ptr(Lmethod, Method::method_data_offset(), G4_scratch);
       __ br_null_short(G4_scratch, Assembler::pn, no_mdo);
       // Increment counter
       Address mdo_invocation_counter(G4_scratch,
-                                     in_bytes(methodDataOopDesc::invocation_counter_offset()) +
+                                     in_bytes(MethodData::invocation_counter_offset()) +
                                      in_bytes(InvocationCounter::counter_offset()));
       __ increment_mask_and_jump(mdo_invocation_counter, increment, mask,
                                  G3_scratch, Lscratch,
                                  Assembler::zero, overflow);
       __ ba_short(done);
     }

-    // Increment counter in methodOop
+    // Increment counter in Method*
     __ bind(no_mdo);
     Address invocation_counter(Lmethod,
-                               in_bytes(methodOopDesc::invocation_counter_offset()) +
+                               in_bytes(Method::invocation_counter_offset()) +
                                in_bytes(InvocationCounter::counter_offset()));
     __ increment_mask_and_jump(invocation_counter, increment, mask,
                                G3_scratch, Lscratch,
                                Assembler::zero, overflow);
     __ bind(done);
   } else {
     // Update standard invocation counters
     __ increment_invocation_counter(O0, G3_scratch);
-    if (ProfileInterpreter) { // %%% Merge this into methodDataOop
-      Address interpreter_invocation_counter(Lmethod,in_bytes(methodOopDesc::interpreter_invocation_counter_offset()));
+    if (ProfileInterpreter) { // %%% Merge this into MethodData*
+      Address interpreter_invocation_counter(Lmethod,in_bytes(Method::interpreter_invocation_counter_offset()));
       __ ld(interpreter_invocation_counter, G3_scratch);
       __ inc(G3_scratch);
       __ st(G3_scratch, interpreter_invocation_counter);
     }

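For reference, increment_mask_and_jump pairs the increment with the Tier0InvokeNotifyFreqLog mask built above; a hedged sketch of its effect (the counter field stores its value pre-shifted by count_shift, so the masked test fires once every 2^Tier0InvokeNotifyFreqLog invocations):

    // Sketch only; the helper name is hypothetical, not VM source.
    static inline bool bump_and_check_overflow(int* counter, int increment, int mask) {
      *counter += increment;           // counter value lives above count_shift
      return (*counter & mask) == 0;   // true -> branch to the overflow path
    }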
@@ -347,14 +347,14 @@
   }

 }

 // Allocate monitor and lock method (asm interpreter)
-// ebx - methodOop
+// ebx - Method*
 //
 void InterpreterGenerator::lock_method(void) {
-  __ ld(Lmethod, in_bytes(methodOopDesc::access_flags_offset()), O0);  // Load access flags.
+  __ ld(Lmethod, in_bytes(Method::access_flags_offset()), O0);  // Load access flags.

 #ifdef ASSERT
   { Label ok;
     __ btst(JVM_ACC_SYNCHRONIZED, O0);
     __ br( Assembler::notZero, false, Assembler::pt, ok);
@@ -369,15 +369,15 @@
   const int mirror_offset = in_bytes(Klass::java_mirror_offset());
   __ btst(JVM_ACC_STATIC, O0);
   __ br( Assembler::zero, true, Assembler::pt, done);
   __ delayed()->ld_ptr(Llocals, Interpreter::local_offset_in_bytes(0), O0); // get receiver for not-static case

-  __ ld_ptr( Lmethod, in_bytes(methodOopDesc::const_offset()), O0);
-  __ ld_ptr( O0, in_bytes(constMethodOopDesc::constants_offset()), O0);
-  __ ld_ptr( O0, constantPoolOopDesc::pool_holder_offset_in_bytes(), O0);
+  __ ld_ptr( Lmethod, in_bytes(Method::const_offset()), O0);
+  __ ld_ptr( O0, in_bytes(ConstMethod::constants_offset()), O0);
+  __ ld_ptr( O0, ConstantPool::pool_holder_offset_in_bytes(), O0);

-  // lock the mirror, not the klassOop
+  // lock the mirror, not the Klass*
   __ ld_ptr( O0, mirror_offset, O0);

 #ifdef ASSERT
   __ tst(O0);
   __ breakpoint_trap(Assembler::zero, Assembler::ptr_cc);
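The three ld_ptr loads in this hunk walk from the method to the mirror that a synchronized static method must lock; in terms of the new metadata classes the chain is roughly (a sketch, accessor spellings assumed):

    // Sketch: lock the java.lang.Class mirror, reached via the constant pool holder.
    oop static_lock_object(Method* m) {
      ConstMethod*  cm = m->constMethod();   // Method::const_offset()
      ConstantPool* cp = cm->constants();    // ConstMethod::constants_offset()
      Klass*        k  = cp->pool_holder();  // ConstantPool::pool_holder_offset_in_bytes()
      return k->java_mirror();               // Klass::java_mirror_offset(), not the Klass* itself
    }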
@@ -492,18 +492,18 @@
 // is necessary.
 //
 // (gri - 2/25/2000)


-  const Address size_of_parameters(G5_method, methodOopDesc::size_of_parameters_offset());
-  const Address size_of_locals    (G5_method, methodOopDesc::size_of_locals_offset());
-  const Address max_stack         (G5_method, methodOopDesc::max_stack_offset());
+  const Address size_of_parameters(G5_method, Method::size_of_parameters_offset());
+  const Address size_of_locals    (G5_method, Method::size_of_locals_offset());
+  const Address max_stack         (G5_method, Method::max_stack_offset());
   int rounded_vm_local_words = round_to( frame::interpreter_frame_vm_local_words, WordsPerLong );

   const int extra_space =
     rounded_vm_local_words +                            // frame local scratch space
-    //6815692//methodOopDesc::extra_stack_words() +     // extra push slots for MH adapters
+    //6815692//Method::extra_stack_words() +            // extra push slots for MH adapters
     frame::memory_parameter_word_sp_offset +            // register save area
     (native_call ? frame::interpreter_frame_extra_outgoing_argument_words : 0);

   const Register Glocals_size = G3;
   const Register Otmp1 = O3;
@@ -571,12 +571,12 @@
   // that all present references to Lbyte_code initialize the register
   // immediately before use
   if (native_call) {
     __ mov(G0, Lbcp);
   } else {
-    __ ld_ptr(G5_method, methodOopDesc::const_offset(), Lbcp);
-    __ add(Lbcp, in_bytes(constMethodOopDesc::codes_offset()), Lbcp);
+    __ ld_ptr(G5_method, Method::const_offset(), Lbcp);
+    __ add(Lbcp, in_bytes(ConstMethod::codes_offset()), Lbcp);
   }
   __ mov( G5_method, Lmethod);                 // set Lmethod
   __ get_constant_pool_cache( LcpoolCache );   // set LcpoolCache
   __ sub(FP, rounded_vm_local_words * BytesPerWord, Lmonitors ); // set Lmonitors
 #ifdef _LP64
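For non-native methods the ld_ptr/add pair above seeds Lbcp with the address of the first bytecode, which sits at a fixed offset inside the ConstMethod; a hedged sketch (helper name hypothetical):

    // Sketch of the Lbcp initialization above.
    u1* initial_bcp(Method* m) {
      ConstMethod* cm = m->constMethod();                      // __ ld_ptr(G5_method, Method::const_offset(), Lbcp)
      return (u1*)cm + in_bytes(ConstMethod::codes_offset());  // __ add(Lbcp, codes_offset, Lbcp)
    }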
@@ -604,12 +604,10 @@

   // A method that does nother but return...

   address entry = __ pc();
   Label slow_path;
-
-  __ verify_oop(G5_method);

   // do nothing for empty methods (do not even increment invocation counter)
   if ( UseFastEmptyMethods) {
     // If we need a safepoint check, generate full interpreter entry.
     AddressLiteral sync_state(SafepointSynchronize::address_of_state());
@@ -660,29 +658,29 @@
   __ br_null_short(Otos_i, Assembler::pn, slow_path);


   // read first instruction word and extract bytecode @ 1 and index @ 2
   // get first 4 bytes of the bytecodes (big endian!)
-  __ ld_ptr(G5_method, methodOopDesc::const_offset(), G1_scratch);
-  __ ld(G1_scratch, constMethodOopDesc::codes_offset(), G1_scratch);
+  __ ld_ptr(G5_method, Method::const_offset(), G1_scratch);
+  __ ld(G1_scratch, ConstMethod::codes_offset(), G1_scratch);

   // move index @ 2 far left then to the right most two bytes.
   __ sll(G1_scratch, 2*BitsPerByte, G1_scratch);
   __ srl(G1_scratch, 2*BitsPerByte - exact_log2(in_words(
                      ConstantPoolCacheEntry::size()) * BytesPerWord), G1_scratch);

   // get constant pool cache
-  __ ld_ptr(G5_method, methodOopDesc::const_offset(), G3_scratch);
-  __ ld_ptr(G3_scratch, constMethodOopDesc::constants_offset(), G3_scratch);
-  __ ld_ptr(G3_scratch, constantPoolOopDesc::cache_offset_in_bytes(), G3_scratch);
+  __ ld_ptr(G5_method, Method::const_offset(), G3_scratch);
+  __ ld_ptr(G3_scratch, ConstMethod::constants_offset(), G3_scratch);
+  __ ld_ptr(G3_scratch, ConstantPool::cache_offset_in_bytes(), G3_scratch);

   // get specific constant pool cache entry
   __ add(G3_scratch, G1_scratch, G3_scratch);

   // Check the constant Pool cache entry to see if it has been resolved.
   // If not, need the slow path.
-  ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
+  ByteSize cp_base_offset = ConstantPoolCache::base_offset();
   __ ld_ptr(G3_scratch, cp_base_offset + ConstantPoolCacheEntry::indices_offset(), G1_scratch);
   __ srl(G1_scratch, 2*BitsPerByte, G1_scratch);
   __ and3(G1_scratch, 0xFF, G1_scratch);
   __ cmp_and_br_short(G1_scratch, Bytecodes::_getfield, Assembler::notEqual, Assembler::pn, slow_path);

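The sll/srl pair above is a fused extract-and-scale: shifting left by 16 isolates the two index bytes at bytecode offset 2 (the word was loaded big-endian), and shifting right by 16 minus log2 of the entry size multiplies the index by the ConstantPoolCacheEntry size in the same step. A sketch of the identity (entry_bytes must be a power of two for the fused shift to work):

    // With s = log2(entry_bytes): (w << 16) >> (16 - s) == (w & 0xFFFF) * entry_bytes.
    uint32_t scaled_cache_offset(uint32_t first4_be, uint32_t entry_bytes) {
      uint32_t index = first4_be & 0xFFFF;   // index @ bytecode offset 2
      return index * entry_bytes;            // byte offset of the cache entry
    }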
@@ -825,21 +823,19 @@
   bool inc_counter = UseCompiler || CountCompiledCalls;

   // make sure registers are different!
   assert_different_registers(G2_thread, G5_method, Gargs, Gtmp1, Gtmp2);

-  const Address Laccess_flags(Lmethod, methodOopDesc::access_flags_offset());
-
-  __ verify_oop(G5_method);
+  const Address Laccess_flags(Lmethod, Method::access_flags_offset());

   const Register Glocals_size = G3;
   assert_different_registers(Glocals_size, G4_scratch, Gframe_size);

   // make sure method is native & not abstract
   // rethink these assertions - they can be simplified and shared (gri 2/25/2000)
 #ifdef ASSERT
-  __ ld(G5_method, methodOopDesc::access_flags_offset(), Gtmp1);
+  __ ld(G5_method, Method::access_flags_offset(), Gtmp1);
   {
     Label L;
     __ btst(JVM_ACC_NATIVE, Gtmp1);
     __ br(Assembler::notZero, false, Assembler::pt, L);
     __ delayed()->nop();
@@ -932,11 +928,11 @@

   // (note that the space for outgoing params is preallocated)

   // get signature handler
   { Label L;
-    Address signature_handler(Lmethod, methodOopDesc::signature_handler_offset());
+    Address signature_handler(Lmethod, Method::signature_handler_offset());
     __ ld_ptr(signature_handler, G3_scratch);
     __ br_notnull_short(G3_scratch, Assembler::pt, L);
     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), Lmethod);
     __ ld_ptr(signature_handler, G3_scratch);
     __ bind(L);
@@ -989,17 +985,17 @@

   __ ld(Laccess_flags, O0);
   __ btst(JVM_ACC_STATIC, O0);
   __ br( Assembler::zero, false, Assembler::pt, not_static);
   // get native function entry point(O0 is a good temp until the very end)
-  __ delayed()->ld_ptr(Lmethod, in_bytes(methodOopDesc::native_function_offset()), O0);
+  __ delayed()->ld_ptr(Lmethod, in_bytes(Method::native_function_offset()), O0);
   // for static methods insert the mirror argument
   const int mirror_offset = in_bytes(Klass::java_mirror_offset());

-  __ ld_ptr(Lmethod, methodOopDesc:: const_offset(), O1);
-  __ ld_ptr(O1, constMethodOopDesc::constants_offset(), O1);
-  __ ld_ptr(O1, constantPoolOopDesc::pool_holder_offset_in_bytes(), O1);
+  __ ld_ptr(Lmethod, Method:: const_offset(), O1);
+  __ ld_ptr(O1, ConstMethod::constants_offset(), O1);
+  __ ld_ptr(O1, ConstantPool::pool_holder_offset_in_bytes(), O1);
   __ ld_ptr(O1, mirror_offset, O1);
 #ifdef ASSERT
   if (!PrintSignatureHandlers) // do not dirty the output with this
   { Label L;
     __ br_notnull_short(O1, Assembler::pt, L);
@@ -1257,25 +1253,23 @@
   const Register Gtmp2 = G1_scratch;

   // make sure registers are different!
   assert_different_registers(G2_thread, G5_method, Gargs, Gtmp1, Gtmp2);

-  const Address size_of_parameters(G5_method, methodOopDesc::size_of_parameters_offset());
-  const Address size_of_locals    (G5_method, methodOopDesc::size_of_locals_offset());
+  const Address size_of_parameters(G5_method, Method::size_of_parameters_offset());
+  const Address size_of_locals    (G5_method, Method::size_of_locals_offset());
   // Seems like G5_method is live at the point this is used. So we could make this look consistent
   // and use in the asserts.
-  const Address access_flags      (Lmethod,   methodOopDesc::access_flags_offset());
-
-  __ verify_oop(G5_method);
+  const Address access_flags      (Lmethod,   Method::access_flags_offset());

   const Register Glocals_size = G3;
   assert_different_registers(Glocals_size, G4_scratch, Gframe_size);

   // make sure method is not native & not abstract
   // rethink these assertions - they can be simplified and shared (gri 2/25/2000)
 #ifdef ASSERT
-  __ ld(G5_method, methodOopDesc::access_flags_offset(), Gtmp1);
+  __ ld(G5_method, Method::access_flags_offset(), Gtmp1);
   {
     Label L;
     __ btst(JVM_ACC_NATIVE, Gtmp1);
     __ br(Assembler::zero, false, Assembler::pt, L);
     __ delayed()->nop();
@@ -1427,11 +1421,11 @@
 //
 // C2 Calling Conventions:
 //
 // The entry code below assumes that the following registers are set
 // when coming in:
-//    G5_method: holds the methodOop of the method to call
+//    G5_method: holds the Method* of the method to call
 //    Lesp:      points to the TOS of the callers expression stack
 //               after having pushed all the parameters
 //
 // The entry code does the following to setup an interpreter frame
 //   pop parameters from the callers stack by adjusting Lesp
@@ -1545,19 +1539,19 @@
   // callee_locals and max_stack are counts, not the size in frame.
   const int locals_size =
     round_to(callee_extra_locals * Interpreter::stackElementWords, WordsPerLong);
   const int max_stack_words = max_stack * Interpreter::stackElementWords;
   return (round_to((max_stack_words
-                    //6815692//+ methodOopDesc::extra_stack_words()
+                    //6815692//+ Method::extra_stack_words()
                     + rounded_vm_local_words
                     + frame::memory_parameter_word_sp_offset), WordsPerLong)
          // already rounded
          + locals_size + monitor_size);
 }

 // How much stack a method top interpreter activation needs in words.
-int AbstractInterpreter::size_top_interpreter_activation(methodOop method) {
+int AbstractInterpreter::size_top_interpreter_activation(Method* method) {

   // See call_stub code
   int call_stub_size = round_to(7 + frame::memory_parameter_word_sp_offset,
                                 WordsPerLong); // 7 + register save area

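size_activation_helper, shown in the hunk above, rounds the stack-plus-overhead portion up to a WordsPerLong boundary before adding the already-rounded locals and monitor sizes; a compact restatement (sketch; round_up here is the usual power-of-two round-up, and WordsPerLong == 2 is assumed for 8-byte alignment of longs):

    // Not VM source; a restatement of the formula above.
    static inline int round_up(int x, int m) { return (x + m - 1) & ~(m - 1); }
    int activation_words(int max_stack_words, int rounded_vm_locals,
                         int reg_save_words, int locals_size, int monitor_size) {
      return round_up(max_stack_words + rounded_vm_locals + reg_save_words, 2)
           + locals_size + monitor_size;   // locals and monitors already rounded
    }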
@@ -1567,11 +1561,11 @@
                        1*frame::interpreter_frame_monitor_size() : 0;
   return size_activation_helper(method->max_locals(), method->max_stack(),
                                 monitor_size) + call_stub_size;
 }

-int AbstractInterpreter::layout_activation(methodOop method,
+int AbstractInterpreter::layout_activation(Method* method,
                                            int tempcount,
                                            int popframe_extra_args,
                                            int moncount,
                                            int caller_actual_parameters,
                                            int callee_param_count,
@@ -1828,11 +1822,11 @@

   const Register Gtmp1 = G3_scratch;
   const Register Gtmp2 = G1_scratch;

   // Compute size of arguments for saving when returning to deoptimized caller
-  __ lduh(Lmethod, in_bytes(methodOopDesc::size_of_parameters_offset()), Gtmp1);
+  __ lduh(Lmethod, in_bytes(Method::size_of_parameters_offset()), Gtmp1);
   __ sll(Gtmp1, Interpreter::logStackElementSize, Gtmp1);
   __ sub(Llocals, Gtmp1, Gtmp2);
   __ add(Gtmp2, wordSize, Gtmp2);
   // Save these arguments
   __ call_VM_leaf(L7_thread_cache, CAST_FROM_FN_PTR(address, Deoptimization::popframe_preserve_args), G2_thread, Gtmp1, Gtmp2);
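The lduh/sll/sub/add sequence above computes the byte size and bottom address of the incoming arguments so popframe_preserve_args can copy them out; sketched in C++ (an assumption-level restatement: on the SPARC interpreter Llocals is the highest-addressed slot, so the range extends downward from it):

    // Hypothetical helper mirroring the emitted arithmetic.
    void popframe_arg_range(char* llocals, int num_params, int log_slot_size,
                            size_t* size_out, char** bottom_out) {
      *size_out   = (size_t)num_params << log_slot_size;  // __ sll(Gtmp1, logStackElementSize, Gtmp1)
      *bottom_out = llocals - *size_out + sizeof(void*);  // __ sub(Llocals, Gtmp1, Gtmp2); __ add(Gtmp2, wordSize, Gtmp2)
    }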
@@ -1875,11 +1869,10 @@
   // Intel has the following comment:
   //// remove the activation (without doing throws on illegalMonitorExceptions)
   // They remove the activation without checking for bad monitor state.
   // %%% We should make sure this is the right semantics before implementing.

-  // %%% changed set_vm_result_2 to set_vm_result and get_vm_result_2 to get_vm_result. Is there a bug here?
   __ set_vm_result(Oexception);
   __ unlock_if_synchronized_method(vtos, /* throw_monitor_exception */ false);

   __ notify_method_exit(false, vtos, InterpreterMacroAssembler::SkipNotifyJVMTI);
