Mercurial > hg > graal-jvmci-8
comparison src/cpu/x86/vm/templateInterpreter_x86_64.cpp @ 304:dc7f315e41f7
5108146: Merge i486 and amd64 cpu directories
6459804: Want client (c1) compiler for x86_64 (amd64) for faster start-up
Reviewed-by: kvn
author | never |
---|---|
date | Wed, 27 Aug 2008 00:21:55 -0700 |
parents | d1605aabd0a1 |
children | 98cb887364d3 |
comparison
equal
deleted
inserted
replaced
303:fa4d1d240383 | 304:dc7f315e41f7 |
---|---|
25 #include "incls/_precompiled.incl" | 25 #include "incls/_precompiled.incl" |
26 #include "incls/_interpreter_x86_64.cpp.incl" | 26 #include "incls/_interpreter_x86_64.cpp.incl" |
27 | 27 |
28 #define __ _masm-> | 28 #define __ _masm-> |
29 | 29 |
30 #ifndef CC_INTERP | |
31 | |
30 const int method_offset = frame::interpreter_frame_method_offset * wordSize; | 32 const int method_offset = frame::interpreter_frame_method_offset * wordSize; |
31 const int bci_offset = frame::interpreter_frame_bcx_offset * wordSize; | 33 const int bci_offset = frame::interpreter_frame_bcx_offset * wordSize; |
32 const int locals_offset = frame::interpreter_frame_locals_offset * wordSize; | 34 const int locals_offset = frame::interpreter_frame_locals_offset * wordSize; |
33 | 35 |
34 //----------------------------------------------------------------------------- | 36 //----------------------------------------------------------------------------- |
37 address entry = __ pc(); | 39 address entry = __ pc(); |
38 | 40 |
39 #ifdef ASSERT | 41 #ifdef ASSERT |
40 { | 42 { |
41 Label L; | 43 Label L; |
42 __ leaq(rax, Address(rbp, | 44 __ lea(rax, Address(rbp, |
43 frame::interpreter_frame_monitor_block_top_offset * | 45 frame::interpreter_frame_monitor_block_top_offset * |
44 wordSize)); | 46 wordSize)); |
45 __ cmpq(rax, rsp); // rax = maximal rsp for current rbp (stack | 47 __ cmpptr(rax, rsp); // rax = maximal rsp for current rbp (stack |
46 // grows negative) | 48 // grows negative) |
47 __ jcc(Assembler::aboveEqual, L); // check if frame is complete | 49 __ jcc(Assembler::aboveEqual, L); // check if frame is complete |
48 __ stop ("interpreter frame not set up"); | 50 __ stop ("interpreter frame not set up"); |
49 __ bind(L); | 51 __ bind(L); |
50 } | 52 } |
51 #endif // ASSERT | 53 #endif // ASSERT |
82 | 84 |
83 address TemplateInterpreterGenerator::generate_ClassCastException_handler() { | 85 address TemplateInterpreterGenerator::generate_ClassCastException_handler() { |
84 address entry = __ pc(); | 86 address entry = __ pc(); |
85 | 87 |
86 // object is at TOS | 88 // object is at TOS |
87 __ popq(c_rarg1); | 89 __ pop(c_rarg1); |
88 | 90 |
89 // expression stack must be empty before entering the VM if an | 91 // expression stack must be empty before entering the VM if an |
90 // exception happened | 92 // exception happened |
91 __ empty_expression_stack(); | 93 __ empty_expression_stack(); |
92 | 94 |
102 const char* name, const char* message, bool pass_oop) { | 104 const char* name, const char* message, bool pass_oop) { |
103 assert(!pass_oop || message == NULL, "either oop or message but not both"); | 105 assert(!pass_oop || message == NULL, "either oop or message but not both"); |
104 address entry = __ pc(); | 106 address entry = __ pc(); |
105 if (pass_oop) { | 107 if (pass_oop) { |
106 // object is at TOS | 108 // object is at TOS |
107 __ popq(c_rarg2); | 109 __ pop(c_rarg2); |
108 } | 110 } |
109 // expression stack must be empty before entering the VM if an | 111 // expression stack must be empty before entering the VM if an |
110 // exception happened | 112 // exception happened |
111 __ empty_expression_stack(); | 113 __ empty_expression_stack(); |
112 // setup parameters | 114 // setup parameters |
135 | 137 |
136 | 138 |
137 address TemplateInterpreterGenerator::generate_continuation_for(TosState state) { | 139 address TemplateInterpreterGenerator::generate_continuation_for(TosState state) { |
138 address entry = __ pc(); | 140 address entry = __ pc(); |
139 // NULL last_sp until next java call | 141 // NULL last_sp until next java call |
140 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD); | 142 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), (int32_t)NULL_WORD); |
141 __ dispatch_next(state); | 143 __ dispatch_next(state); |
142 return entry; | 144 return entry; |
143 } | 145 } |
144 | 146 |
145 | 147 |
151 // here and the specialized cleanup code is not needed here. | 153 // here and the specialized cleanup code is not needed here. |
152 | 154 |
153 address entry = __ pc(); | 155 address entry = __ pc(); |
154 | 156 |
155 // Restore stack bottom in case i2c adjusted stack | 157 // Restore stack bottom in case i2c adjusted stack |
156 __ movq(rsp, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize)); | 158 __ movptr(rsp, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize)); |
157 // and NULL it as marker that esp is now tos until next java call | 159 // and NULL it as marker that esp is now tos until next java call |
158 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD); | 160 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), (int32_t)NULL_WORD); |
159 | 161 |
160 __ restore_bcp(); | 162 __ restore_bcp(); |
161 __ restore_locals(); | 163 __ restore_locals(); |
164 | |
162 __ get_cache_and_index_at_bcp(rbx, rcx, 1); | 165 __ get_cache_and_index_at_bcp(rbx, rcx, 1); |
163 __ movl(rbx, Address(rbx, rcx, | 166 __ movl(rbx, Address(rbx, rcx, |
164 Address::times_8, | 167 Address::times_8, |
165 in_bytes(constantPoolCacheOopDesc::base_offset()) + | 168 in_bytes(constantPoolCacheOopDesc::base_offset()) + |
166 3 * wordSize)); | 169 3 * wordSize)); |
167 __ andl(rbx, 0xFF); | 170 __ andl(rbx, 0xFF); |
168 if (TaggedStackInterpreter) __ shll(rbx, 1); // 2 slots per parameter. | 171 if (TaggedStackInterpreter) __ shll(rbx, 1); // 2 slots per parameter. |
169 __ leaq(rsp, Address(rsp, rbx, Address::times_8)); | 172 __ lea(rsp, Address(rsp, rbx, Address::times_8)); |
170 __ dispatch_next(state, step); | 173 __ dispatch_next(state, step); |
171 return entry; | 174 return entry; |
172 } | 175 } |
173 | 176 |
174 | 177 |
175 address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, | 178 address TemplateInterpreterGenerator::generate_deopt_entry_for(TosState state, |
176 int step) { | 179 int step) { |
177 address entry = __ pc(); | 180 address entry = __ pc(); |
178 // NULL last_sp until next java call | 181 // NULL last_sp until next java call |
179 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD); | 182 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), (int32_t)NULL_WORD); |
180 __ restore_bcp(); | 183 __ restore_bcp(); |
181 __ restore_locals(); | 184 __ restore_locals(); |
182 // handle exceptions | 185 // handle exceptions |
183 { | 186 { |
184 Label L; | 187 Label L; |
185 __ cmpq(Address(r15_thread, Thread::pending_exception_offset()), (int) NULL); | 188 __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t) NULL_WORD); |
186 __ jcc(Assembler::zero, L); | 189 __ jcc(Assembler::zero, L); |
187 __ call_VM(noreg, | 190 __ call_VM(noreg, |
188 CAST_FROM_FN_PTR(address, | 191 CAST_FROM_FN_PTR(address, |
189 InterpreterRuntime::throw_pending_exception)); | 192 InterpreterRuntime::throw_pending_exception)); |
190 __ should_not_reach_here(); | 193 __ should_not_reach_here(); |
229 case T_VOID : /* nothing to do */ break; | 232 case T_VOID : /* nothing to do */ break; |
230 case T_FLOAT : /* nothing to do */ break; | 233 case T_FLOAT : /* nothing to do */ break; |
231 case T_DOUBLE : /* nothing to do */ break; | 234 case T_DOUBLE : /* nothing to do */ break; |
232 case T_OBJECT : | 235 case T_OBJECT : |
233 // retrieve result from frame | 236 // retrieve result from frame |
234 __ movq(rax, Address(rbp, frame::interpreter_frame_oop_temp_offset*wordSize)); | 237 __ movptr(rax, Address(rbp, frame::interpreter_frame_oop_temp_offset*wordSize)); |
235 // and verify it | 238 // and verify it |
236 __ verify_oop(rax); | 239 __ verify_oop(rax); |
237 break; | 240 break; |
238 default : ShouldNotReachHere(); | 241 default : ShouldNotReachHere(); |
239 } | 242 } |
334 __ call_VM(noreg, | 337 __ call_VM(noreg, |
335 CAST_FROM_FN_PTR(address, | 338 CAST_FROM_FN_PTR(address, |
336 InterpreterRuntime::frequency_counter_overflow), | 339 InterpreterRuntime::frequency_counter_overflow), |
337 c_rarg1); | 340 c_rarg1); |
338 | 341 |
339 __ movq(rbx, Address(rbp, method_offset)); // restore methodOop | 342 __ movptr(rbx, Address(rbp, method_offset)); // restore methodOop |
340 // Preserve invariant that r13/r14 contain bcp/locals of sender frame | 343 // Preserve invariant that r13/r14 contain bcp/locals of sender frame |
341 // and jump to the interpreted entry. | 344 // and jump to the interpreted entry. |
342 __ jmp(*do_continue, relocInfo::none); | 345 __ jmp(*do_continue, relocInfo::none); |
343 } | 346 } |
344 | 347 |
383 | 386 |
384 const Address stack_base(r15_thread, Thread::stack_base_offset()); | 387 const Address stack_base(r15_thread, Thread::stack_base_offset()); |
385 const Address stack_size(r15_thread, Thread::stack_size_offset()); | 388 const Address stack_size(r15_thread, Thread::stack_size_offset()); |
386 | 389 |
387 // locals + overhead, in bytes | 390 // locals + overhead, in bytes |
388 __ movq(rax, rdx); | 391 __ mov(rax, rdx); |
389 __ shll(rax, Interpreter::logStackElementSize()); // 2 slots per parameter. | 392 __ shlptr(rax, Interpreter::logStackElementSize()); // 2 slots per parameter. |
390 __ addq(rax, overhead_size); | 393 __ addptr(rax, overhead_size); |
391 | 394 |
392 #ifdef ASSERT | 395 #ifdef ASSERT |
393 Label stack_base_okay, stack_size_okay; | 396 Label stack_base_okay, stack_size_okay; |
394 // verify that thread stack base is non-zero | 397 // verify that thread stack base is non-zero |
395 __ cmpq(stack_base, 0); | 398 __ cmpptr(stack_base, (int32_t)NULL_WORD); |
396 __ jcc(Assembler::notEqual, stack_base_okay); | 399 __ jcc(Assembler::notEqual, stack_base_okay); |
397 __ stop("stack base is zero"); | 400 __ stop("stack base is zero"); |
398 __ bind(stack_base_okay); | 401 __ bind(stack_base_okay); |
399 // verify that thread stack size is non-zero | 402 // verify that thread stack size is non-zero |
400 __ cmpq(stack_size, 0); | 403 __ cmpptr(stack_size, 0); |
401 __ jcc(Assembler::notEqual, stack_size_okay); | 404 __ jcc(Assembler::notEqual, stack_size_okay); |
402 __ stop("stack size is zero"); | 405 __ stop("stack size is zero"); |
403 __ bind(stack_size_okay); | 406 __ bind(stack_size_okay); |
404 #endif | 407 #endif |
405 | 408 |
406 // Add stack base to locals and subtract stack size | 409 // Add stack base to locals and subtract stack size |
407 __ addq(rax, stack_base); | 410 __ addptr(rax, stack_base); |
408 __ subq(rax, stack_size); | 411 __ subptr(rax, stack_size); |
409 | 412 |
410 // add in the red and yellow zone sizes | 413 // add in the red and yellow zone sizes |
411 __ addq(rax, (StackRedPages + StackYellowPages) * page_size); | 414 __ addptr(rax, (StackRedPages + StackYellowPages) * page_size); |
412 | 415 |
413 // check against the current stack bottom | 416 // check against the current stack bottom |
414 __ cmpq(rsp, rax); | 417 __ cmpptr(rsp, rax); |
415 __ jcc(Assembler::above, after_frame_check); | 418 __ jcc(Assembler::above, after_frame_check); |
416 | 419 |
417 __ popq(rax); // get return address | 420 __ pop(rax); // get return address |
418 __ jump(ExternalAddress(Interpreter::throw_StackOverflowError_entry())); | 421 __ jump(ExternalAddress(Interpreter::throw_StackOverflowError_entry())); |
419 | 422 |
420 // all done with frame size check | 423 // all done with frame size check |
421 __ bind(after_frame_check); | 424 __ bind(after_frame_check); |
422 } | 425 } |
456 Klass::java_mirror_offset_in_bytes(); | 459 Klass::java_mirror_offset_in_bytes(); |
457 Label done; | 460 Label done; |
458 __ movl(rax, access_flags); | 461 __ movl(rax, access_flags); |
459 __ testl(rax, JVM_ACC_STATIC); | 462 __ testl(rax, JVM_ACC_STATIC); |
460 // get receiver (assume this is frequent case) | 463 // get receiver (assume this is frequent case) |
461 __ movq(rax, Address(r14, Interpreter::local_offset_in_bytes(0))); | 464 __ movptr(rax, Address(r14, Interpreter::local_offset_in_bytes(0))); |
462 __ jcc(Assembler::zero, done); | 465 __ jcc(Assembler::zero, done); |
463 __ movq(rax, Address(rbx, methodOopDesc::constants_offset())); | 466 __ movptr(rax, Address(rbx, methodOopDesc::constants_offset())); |
464 __ movq(rax, Address(rax, | 467 __ movptr(rax, Address(rax, |
465 constantPoolOopDesc::pool_holder_offset_in_bytes())); | 468 constantPoolOopDesc::pool_holder_offset_in_bytes())); |
466 __ movq(rax, Address(rax, mirror_offset)); | 469 __ movptr(rax, Address(rax, mirror_offset)); |
467 | 470 |
468 #ifdef ASSERT | 471 #ifdef ASSERT |
469 { | 472 { |
470 Label L; | 473 Label L; |
471 __ testq(rax, rax); | 474 __ testptr(rax, rax); |
472 __ jcc(Assembler::notZero, L); | 475 __ jcc(Assembler::notZero, L); |
473 __ stop("synchronization object is NULL"); | 476 __ stop("synchronization object is NULL"); |
474 __ bind(L); | 477 __ bind(L); |
475 } | 478 } |
476 #endif // ASSERT | 479 #endif // ASSERT |
477 | 480 |
478 __ bind(done); | 481 __ bind(done); |
479 } | 482 } |
480 | 483 |
481 // add space for monitor & lock | 484 // add space for monitor & lock |
482 __ subq(rsp, entry_size); // add space for a monitor entry | 485 __ subptr(rsp, entry_size); // add space for a monitor entry |
483 __ movq(monitor_block_top, rsp); // set new monitor block top | 486 __ movptr(monitor_block_top, rsp); // set new monitor block top |
484 // store object | 487 // store object |
485 __ movq(Address(rsp, BasicObjectLock::obj_offset_in_bytes()), rax); | 488 __ movptr(Address(rsp, BasicObjectLock::obj_offset_in_bytes()), rax); |
486 __ movq(c_rarg1, rsp); // object address | 489 __ movptr(c_rarg1, rsp); // object address |
487 __ lock_object(c_rarg1); | 490 __ lock_object(c_rarg1); |
488 } | 491 } |
489 | 492 |
490 // Generate a fixed interpreter frame. This is identical setup for | 493 // Generate a fixed interpreter frame. This is identical setup for |
491 // interpreted methods and for native methods hence the shared code. | 494 // interpreted methods and for native methods hence the shared code. |
496 // r14: pointer to locals | 499 // r14: pointer to locals |
497 // r13: sender sp | 500 // r13: sender sp |
498 // rdx: cp cache | 501 // rdx: cp cache |
499 void TemplateInterpreterGenerator::generate_fixed_frame(bool native_call) { | 502 void TemplateInterpreterGenerator::generate_fixed_frame(bool native_call) { |
500 // initialize fixed part of activation frame | 503 // initialize fixed part of activation frame |
501 __ pushq(rax); // save return address | 504 __ push(rax); // save return address |
502 __ enter(); // save old & set new rbp | 505 __ enter(); // save old & set new rbp |
503 __ pushq(r13); // set sender sp | 506 __ push(r13); // set sender sp |
504 __ pushq((int)NULL_WORD); // leave last_sp as null | 507 __ push((int)NULL_WORD); // leave last_sp as null |
505 __ movq(r13, Address(rbx, methodOopDesc::const_offset())); // get constMethodOop | 508 __ movptr(r13, Address(rbx, methodOopDesc::const_offset())); // get constMethodOop |
506 __ leaq(r13, Address(r13, constMethodOopDesc::codes_offset())); // get codebase | 509 __ lea(r13, Address(r13, constMethodOopDesc::codes_offset())); // get codebase |
507 __ pushq(rbx); // save methodOop | 510 __ push(rbx); // save methodOop |
508 if (ProfileInterpreter) { | 511 if (ProfileInterpreter) { |
509 Label method_data_continue; | 512 Label method_data_continue; |
510 __ movq(rdx, Address(rbx, in_bytes(methodOopDesc::method_data_offset()))); | 513 __ movptr(rdx, Address(rbx, in_bytes(methodOopDesc::method_data_offset()))); |
511 __ testq(rdx, rdx); | 514 __ testptr(rdx, rdx); |
512 __ jcc(Assembler::zero, method_data_continue); | 515 __ jcc(Assembler::zero, method_data_continue); |
513 __ addq(rdx, in_bytes(methodDataOopDesc::data_offset())); | 516 __ addptr(rdx, in_bytes(methodDataOopDesc::data_offset())); |
514 __ bind(method_data_continue); | 517 __ bind(method_data_continue); |
515 __ pushq(rdx); // set the mdp (method data pointer) | 518 __ push(rdx); // set the mdp (method data pointer) |
516 } else { | 519 } else { |
517 __ pushq(0); | 520 __ push(0); |
518 } | 521 } |
519 | 522 |
520 __ movq(rdx, Address(rbx, methodOopDesc::constants_offset())); | 523 __ movptr(rdx, Address(rbx, methodOopDesc::constants_offset())); |
521 __ movq(rdx, Address(rdx, constantPoolOopDesc::cache_offset_in_bytes())); | 524 __ movptr(rdx, Address(rdx, constantPoolOopDesc::cache_offset_in_bytes())); |
522 __ pushq(rdx); // set constant pool cache | 525 __ push(rdx); // set constant pool cache |
523 __ pushq(r14); // set locals pointer | 526 __ push(r14); // set locals pointer |
524 if (native_call) { | 527 if (native_call) { |
525 __ pushq(0); // no bcp | 528 __ push(0); // no bcp |
526 } else { | 529 } else { |
527 __ pushq(r13); // set bcp | 530 __ push(r13); // set bcp |
528 } | 531 } |
529 __ pushq(0); // reserve word for pointer to expression stack bottom | 532 __ push(0); // reserve word for pointer to expression stack bottom |
530 __ movq(Address(rsp, 0), rsp); // set expression stack bottom | 533 __ movptr(Address(rsp, 0), rsp); // set expression stack bottom |
531 } | 534 } |
532 | 535 |
533 // End of helpers | 536 // End of helpers |
537 | |
538 // Various method entries | |
539 //------------------------------------------------------------------------------------------------------------------------ | |
540 // | |
541 // | |
542 | |
543 // Call an accessor method (assuming it is resolved, otherwise drop | |
544 // into vanilla (slow path) entry | |
545 address InterpreterGenerator::generate_accessor_entry(void) { | |
546 // rbx: methodOop | |
547 | |
548 // r13: senderSP must be preserved for slow path, set SP to it on fast path | |
549 | |
550 address entry_point = __ pc(); | |
551 Label xreturn_path; | |
552 | |
553 // do fastpath for resolved accessor methods | |
554 if (UseFastAccessorMethods) { | |
555 // Code: _aload_0, _(i|a)getfield, _(i|a)return or any rewrites | |
556 // thereof; parameter size = 1 | |
557 // Note: We can only use this code if the getfield has been resolved | |
558 // and if we don't have a null-pointer exception => check for | |
559 // these conditions first and use slow path if necessary. | |
560 Label slow_path; | |
561 // If we need a safepoint check, generate full interpreter entry. | |
562 __ cmp32(ExternalAddress(SafepointSynchronize::address_of_state()), | |
563 SafepointSynchronize::_not_synchronized); | |
564 | |
565 __ jcc(Assembler::notEqual, slow_path); | |
566 // rbx: method | |
567 __ movptr(rax, Address(rsp, wordSize)); | |
568 | |
569 // check if local 0 != NULL and read field | |
570 __ testptr(rax, rax); | |
571 __ jcc(Assembler::zero, slow_path); | |
572 | |
573 __ movptr(rdi, Address(rbx, methodOopDesc::constants_offset())); | |
574 // read first instruction word and extract bytecode @ 1 and index @ 2 | |
575 __ movptr(rdx, Address(rbx, methodOopDesc::const_offset())); | |
576 __ movl(rdx, Address(rdx, constMethodOopDesc::codes_offset())); | |
577 // Shift codes right to get the index on the right. | |
578 // The bytecode fetched looks like <index><0xb4><0x2a> | |
579 __ shrl(rdx, 2 * BitsPerByte); | |
580 __ shll(rdx, exact_log2(in_words(ConstantPoolCacheEntry::size()))); | |
581 __ movptr(rdi, Address(rdi, constantPoolOopDesc::cache_offset_in_bytes())); | |
582 | |
583 // rax: local 0 | |
584 // rbx: method | |
585 // rdx: constant pool cache index | |
586 // rdi: constant pool cache | |
587 | |
588 // check if getfield has been resolved and read constant pool cache entry | |
589 // check the validity of the cache entry by testing whether _indices field | |
590 // contains Bytecode::_getfield in b1 byte. | |
591 assert(in_words(ConstantPoolCacheEntry::size()) == 4, | |
592 "adjust shift below"); | |
593 __ movl(rcx, | |
594 Address(rdi, | |
595 rdx, | |
596 Address::times_8, | |
597 constantPoolCacheOopDesc::base_offset() + | |
598 ConstantPoolCacheEntry::indices_offset())); | |
599 __ shrl(rcx, 2 * BitsPerByte); | |
600 __ andl(rcx, 0xFF); | |
601 __ cmpl(rcx, Bytecodes::_getfield); | |
602 __ jcc(Assembler::notEqual, slow_path); | |
603 | |
604 // Note: constant pool entry is not valid before bytecode is resolved | |
605 __ movptr(rcx, | |
606 Address(rdi, | |
607 rdx, | |
608 Address::times_8, | |
609 constantPoolCacheOopDesc::base_offset() + | |
610 ConstantPoolCacheEntry::f2_offset())); | |
611 // edx: flags | |
612 __ movl(rdx, | |
613 Address(rdi, | |
614 rdx, | |
615 Address::times_8, | |
616 constantPoolCacheOopDesc::base_offset() + | |
617 ConstantPoolCacheEntry::flags_offset())); | |
618 | |
619 Label notObj, notInt, notByte, notShort; | |
620 const Address field_address(rax, rcx, Address::times_1); | |
621 | |
622 // Need to differentiate between igetfield, agetfield, bgetfield etc. | |
623 // because they are different sizes. | |
624 // Use the type from the constant pool cache | |
625 __ shrl(rdx, ConstantPoolCacheEntry::tosBits); | |
626 // Make sure we don't need to mask edx for tosBits after the above shift | |
627 ConstantPoolCacheEntry::verify_tosBits(); | |
628 | |
629 __ cmpl(rdx, atos); | |
630 __ jcc(Assembler::notEqual, notObj); | |
631 // atos | |
632 __ load_heap_oop(rax, field_address); | |
633 __ jmp(xreturn_path); | |
634 | |
635 __ bind(notObj); | |
636 __ cmpl(rdx, itos); | |
637 __ jcc(Assembler::notEqual, notInt); | |
638 // itos | |
639 __ movl(rax, field_address); | |
640 __ jmp(xreturn_path); | |
641 | |
642 __ bind(notInt); | |
643 __ cmpl(rdx, btos); | |
644 __ jcc(Assembler::notEqual, notByte); | |
645 // btos | |
646 __ load_signed_byte(rax, field_address); | |
647 __ jmp(xreturn_path); | |
648 | |
649 __ bind(notByte); | |
650 __ cmpl(rdx, stos); | |
651 __ jcc(Assembler::notEqual, notShort); | |
652 // stos | |
653 __ load_signed_word(rax, field_address); | |
654 __ jmp(xreturn_path); | |
655 | |
656 __ bind(notShort); | |
657 #ifdef ASSERT | |
658 Label okay; | |
659 __ cmpl(rdx, ctos); | |
660 __ jcc(Assembler::equal, okay); | |
661 __ stop("what type is this?"); | |
662 __ bind(okay); | |
663 #endif | |
664 // ctos | |
665 __ load_unsigned_word(rax, field_address); | |
666 | |
667 __ bind(xreturn_path); | |
668 | |
669 // _ireturn/_areturn | |
670 __ pop(rdi); | |
671 __ mov(rsp, r13); | |
672 __ jmp(rdi); | |
673 __ ret(0); | |
674 | |
675 // generate a vanilla interpreter entry as the slow path | |
676 __ bind(slow_path); | |
677 (void) generate_normal_entry(false); | |
678 } else { | |
679 (void) generate_normal_entry(false); | |
680 } | |
681 | |
682 return entry_point; | |
683 } | |
534 | 684 |
535 // Interpreter stub for calling a native method. (asm interpreter) | 685 // Interpreter stub for calling a native method. (asm interpreter) |
536 // This sets up a somewhat different looking stack for calling the | 686 // This sets up a somewhat different looking stack for calling the |
537 // native method than the typical interpreter frame setup. | 687 // native method than the typical interpreter frame setup. |
538 address InterpreterGenerator::generate_native_entry(bool synchronized) { | 688 address InterpreterGenerator::generate_native_entry(bool synchronized) { |
559 // we only add a handful of words to the stack | 709 // we only add a handful of words to the stack |
560 | 710 |
561 // rbx: methodOop | 711 // rbx: methodOop |
562 // rcx: size of parameters | 712 // rcx: size of parameters |
563 // r13: sender sp | 713 // r13: sender sp |
564 __ popq(rax); // get return address | 714 __ pop(rax); // get return address |
565 | 715 |
566 // for natives the size of locals is zero | 716 // for natives the size of locals is zero |
567 | 717 |
568 // compute beginning of parameters (r14) | 718 // compute beginning of parameters (r14) |
569 if (TaggedStackInterpreter) __ shll(rcx, 1); // 2 slots per parameter. | 719 if (TaggedStackInterpreter) __ shll(rcx, 1); // 2 slots per parameter. |
570 __ leaq(r14, Address(rsp, rcx, Address::times_8, -wordSize)); | 720 __ lea(r14, Address(rsp, rcx, Address::times_8, -wordSize)); |
571 | 721 |
572 // add 2 zero-initialized slots for native calls | 722 // add 2 zero-initialized slots for native calls |
573 // initialize result_handler slot | 723 // initialize result_handler slot |
574 __ pushq((int) NULL); | 724 __ push((int) NULL_WORD); |
575 // slot for oop temp | 725 // slot for oop temp |
576 // (static native method holder mirror/jni oop result) | 726 // (static native method holder mirror/jni oop result) |
577 __ pushq((int) NULL); | 727 __ push((int) NULL_WORD); |
578 | 728 |
579 if (inc_counter) { | 729 if (inc_counter) { |
580 __ movl(rcx, invocation_counter); // (pre-)fetch invocation count | 730 __ movl(rcx, invocation_counter); // (pre-)fetch invocation count |
581 } | 731 } |
582 | 732 |
649 #ifdef ASSERT | 799 #ifdef ASSERT |
650 { | 800 { |
651 Label L; | 801 Label L; |
652 const Address monitor_block_top(rbp, | 802 const Address monitor_block_top(rbp, |
653 frame::interpreter_frame_monitor_block_top_offset * wordSize); | 803 frame::interpreter_frame_monitor_block_top_offset * wordSize); |
654 __ movq(rax, monitor_block_top); | 804 __ movptr(rax, monitor_block_top); |
655 __ cmpq(rax, rsp); | 805 __ cmpptr(rax, rsp); |
656 __ jcc(Assembler::equal, L); | 806 __ jcc(Assembler::equal, L); |
657 __ stop("broken stack frame setup in interpreter"); | 807 __ stop("broken stack frame setup in interpreter"); |
658 __ bind(L); | 808 __ bind(L); |
659 } | 809 } |
660 #endif | 810 #endif |
672 __ load_unsigned_word(t, | 822 __ load_unsigned_word(t, |
673 Address(method, | 823 Address(method, |
674 methodOopDesc::size_of_parameters_offset())); | 824 methodOopDesc::size_of_parameters_offset())); |
675 __ shll(t, Interpreter::logStackElementSize()); | 825 __ shll(t, Interpreter::logStackElementSize()); |
676 | 826 |
677 __ subq(rsp, t); | 827 __ subptr(rsp, t); |
678 __ subq(rsp, frame::arg_reg_save_area_bytes); // windows | 828 __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows |
679 __ andq(rsp, -16); // must be 16 byte boundary (see amd64 ABI) | 829 __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI) |
680 | 830 |
681 // get signature handler | 831 // get signature handler |
682 { | 832 { |
683 Label L; | 833 Label L; |
684 __ movq(t, Address(method, methodOopDesc::signature_handler_offset())); | 834 __ movptr(t, Address(method, methodOopDesc::signature_handler_offset())); |
685 __ testq(t, t); | 835 __ testptr(t, t); |
686 __ jcc(Assembler::notZero, L); | 836 __ jcc(Assembler::notZero, L); |
687 __ call_VM(noreg, | 837 __ call_VM(noreg, |
688 CAST_FROM_FN_PTR(address, | 838 CAST_FROM_FN_PTR(address, |
689 InterpreterRuntime::prepare_native_call), | 839 InterpreterRuntime::prepare_native_call), |
690 method); | 840 method); |
691 __ get_method(method); | 841 __ get_method(method); |
692 __ movq(t, Address(method, methodOopDesc::signature_handler_offset())); | 842 __ movptr(t, Address(method, methodOopDesc::signature_handler_offset())); |
693 __ bind(L); | 843 __ bind(L); |
694 } | 844 } |
695 | 845 |
696 // call signature handler | 846 // call signature handler |
697 assert(InterpreterRuntime::SignatureHandlerGenerator::from() == r14, | 847 assert(InterpreterRuntime::SignatureHandlerGenerator::from() == r14, |
709 __ get_method(method); // slow path can do a GC, reload RBX | 859 __ get_method(method); // slow path can do a GC, reload RBX |
710 | 860 |
711 | 861 |
712 // result handler is in rax | 862 // result handler is in rax |
713 // set result handler | 863 // set result handler |
714 __ movq(Address(rbp, | 864 __ movptr(Address(rbp, |
715 (frame::interpreter_frame_result_handler_offset) * wordSize), | 865 (frame::interpreter_frame_result_handler_offset) * wordSize), |
716 rax); | 866 rax); |
717 | 867 |
718 // pass mirror handle if static call | 868 // pass mirror handle if static call |
719 { | 869 { |
720 Label L; | 870 Label L; |
721 const int mirror_offset = klassOopDesc::klass_part_offset_in_bytes() + | 871 const int mirror_offset = klassOopDesc::klass_part_offset_in_bytes() + |
722 Klass::java_mirror_offset_in_bytes(); | 872 Klass::java_mirror_offset_in_bytes(); |
723 __ movl(t, Address(method, methodOopDesc::access_flags_offset())); | 873 __ movl(t, Address(method, methodOopDesc::access_flags_offset())); |
724 __ testl(t, JVM_ACC_STATIC); | 874 __ testl(t, JVM_ACC_STATIC); |
725 __ jcc(Assembler::zero, L); | 875 __ jcc(Assembler::zero, L); |
726 // get mirror | 876 // get mirror |
727 __ movq(t, Address(method, methodOopDesc::constants_offset())); | 877 __ movptr(t, Address(method, methodOopDesc::constants_offset())); |
728 __ movq(t, Address(t, constantPoolOopDesc::pool_holder_offset_in_bytes())); | 878 __ movptr(t, Address(t, constantPoolOopDesc::pool_holder_offset_in_bytes())); |
729 __ movq(t, Address(t, mirror_offset)); | 879 __ movptr(t, Address(t, mirror_offset)); |
730 // copy mirror into activation frame | 880 // copy mirror into activation frame |
731 __ movq(Address(rbp, frame::interpreter_frame_oop_temp_offset * wordSize), | 881 __ movptr(Address(rbp, frame::interpreter_frame_oop_temp_offset * wordSize), |
732 t); | 882 t); |
733 // pass handle to mirror | 883 // pass handle to mirror |
734 __ leaq(c_rarg1, | 884 __ lea(c_rarg1, |
735 Address(rbp, frame::interpreter_frame_oop_temp_offset * wordSize)); | 885 Address(rbp, frame::interpreter_frame_oop_temp_offset * wordSize)); |
736 __ bind(L); | 886 __ bind(L); |
737 } | 887 } |
738 | 888 |
739 // get native function entry point | 889 // get native function entry point |
740 { | 890 { |
741 Label L; | 891 Label L; |
742 __ movq(rax, Address(method, methodOopDesc::native_function_offset())); | 892 __ movptr(rax, Address(method, methodOopDesc::native_function_offset())); |
743 ExternalAddress unsatisfied(SharedRuntime::native_method_throw_unsatisfied_link_error_entry()); | 893 ExternalAddress unsatisfied(SharedRuntime::native_method_throw_unsatisfied_link_error_entry()); |
744 __ movptr(rscratch2, unsatisfied.addr()); | 894 __ movptr(rscratch2, unsatisfied.addr()); |
745 __ cmpq(rax, rscratch2); | 895 __ cmpptr(rax, rscratch2); |
746 __ jcc(Assembler::notEqual, L); | 896 __ jcc(Assembler::notEqual, L); |
747 __ call_VM(noreg, | 897 __ call_VM(noreg, |
748 CAST_FROM_FN_PTR(address, | 898 CAST_FROM_FN_PTR(address, |
749 InterpreterRuntime::prepare_native_call), | 899 InterpreterRuntime::prepare_native_call), |
750 method); | 900 method); |
751 __ get_method(method); | 901 __ get_method(method); |
752 __ verify_oop(method); | 902 __ verify_oop(method); |
753 __ movq(rax, Address(method, methodOopDesc::native_function_offset())); | 903 __ movptr(rax, Address(method, methodOopDesc::native_function_offset())); |
754 __ bind(L); | 904 __ bind(L); |
755 } | 905 } |
756 | 906 |
757 // pass JNIEnv | 907 // pass JNIEnv |
758 __ leaq(c_rarg0, Address(r15_thread, JavaThread::jni_environment_offset())); | 908 __ lea(c_rarg0, Address(r15_thread, JavaThread::jni_environment_offset())); |
759 | 909 |
760 // It is enough that the pc() points into the right code | 910 // It is enough that the pc() points into the right code |
761 // segment. It does not have to be the correct return pc. | 911 // segment. It does not have to be the correct return pc. |
762 __ set_last_Java_frame(rsp, rbp, (address) __ pc()); | 912 __ set_last_Java_frame(rsp, rbp, (address) __ pc()); |
763 | 913 |
784 | 934 |
785 // Depending on runtime options, either restore the MXCSR | 935 // Depending on runtime options, either restore the MXCSR |
786 // register after returning from the JNI Call or verify that | 936 // register after returning from the JNI Call or verify that |
787 // it wasn't changed during -Xcheck:jni. | 937 // it wasn't changed during -Xcheck:jni. |
788 if (RestoreMXCSROnJNICalls) { | 938 if (RestoreMXCSROnJNICalls) { |
789 __ ldmxcsr(ExternalAddress(StubRoutines::amd64::mxcsr_std())); | 939 __ ldmxcsr(ExternalAddress(StubRoutines::x86::mxcsr_std())); |
790 } | 940 } |
791 else if (CheckJNICalls) { | 941 else if (CheckJNICalls) { |
792 __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, StubRoutines::amd64::verify_mxcsr_entry()))); | 942 __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, StubRoutines::x86::verify_mxcsr_entry()))); |
793 } | 943 } |
794 | 944 |
795 // NOTE: The order of these pushes is known to frame::interpreter_frame_result | 945 // NOTE: The order of these pushes is known to frame::interpreter_frame_result |
796 // in order to extract the result of a method call. If the order of these | 946 // in order to extract the result of a method call. If the order of these |
797 // pushes change or anything else is added to the stack then the code in | 947 // pushes change or anything else is added to the stack then the code in |
836 // clearing _last_native_pc down below. Also can't use | 986 // clearing _last_native_pc down below. Also can't use |
837 // call_VM_leaf either as it will check to see if r13 & r14 are | 987 // call_VM_leaf either as it will check to see if r13 & r14 are |
838 // preserved and correspond to the bcp/locals pointers. So we do a | 988 // preserved and correspond to the bcp/locals pointers. So we do a |
839 // runtime call by hand. | 989 // runtime call by hand. |
840 // | 990 // |
841 __ movq(c_rarg0, r15_thread); | 991 __ mov(c_rarg0, r15_thread); |
842 __ movq(r12, rsp); // remember sp | 992 __ mov(r12, rsp); // remember sp |
843 __ subq(rsp, frame::arg_reg_save_area_bytes); // windows | 993 __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows |
844 __ andq(rsp, -16); // align stack as required by ABI | 994 __ andptr(rsp, -16); // align stack as required by ABI |
845 __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, JavaThread::check_special_condition_for_native_trans))); | 995 __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, JavaThread::check_special_condition_for_native_trans))); |
846 __ movq(rsp, r12); // restore sp | 996 __ mov(rsp, r12); // restore sp |
847 __ reinit_heapbase(); | 997 __ reinit_heapbase(); |
848 __ bind(Continue); | 998 __ bind(Continue); |
849 } | 999 } |
850 | 1000 |
851 // change thread state | 1001 // change thread state |
853 | 1003 |
854 // reset_last_Java_frame | 1004 // reset_last_Java_frame |
855 __ reset_last_Java_frame(true, true); | 1005 __ reset_last_Java_frame(true, true); |
856 | 1006 |
857 // reset handle block | 1007 // reset handle block |
858 __ movq(t, Address(r15_thread, JavaThread::active_handles_offset())); | 1008 __ movptr(t, Address(r15_thread, JavaThread::active_handles_offset())); |
859 __ movptr(Address(t, JNIHandleBlock::top_offset_in_bytes()), NULL_WORD); | 1009 __ movptr(Address(t, JNIHandleBlock::top_offset_in_bytes()), (int32_t)NULL_WORD); |
860 | 1010 |
861 // If result is an oop unbox and store it in frame where gc will see it | 1011 // If result is an oop unbox and store it in frame where gc will see it |
862 // and result handler will pick it up | 1012 // and result handler will pick it up |
863 | 1013 |
864 { | 1014 { |
865 Label no_oop, store_result; | 1015 Label no_oop, store_result; |
866 __ lea(t, ExternalAddress(AbstractInterpreter::result_handler(T_OBJECT))); | 1016 __ lea(t, ExternalAddress(AbstractInterpreter::result_handler(T_OBJECT))); |
867 __ cmpq(t, Address(rbp, frame::interpreter_frame_result_handler_offset*wordSize)); | 1017 __ cmpptr(t, Address(rbp, frame::interpreter_frame_result_handler_offset*wordSize)); |
868 __ jcc(Assembler::notEqual, no_oop); | 1018 __ jcc(Assembler::notEqual, no_oop); |
869 // retrieve result | 1019 // retrieve result |
870 __ pop(ltos); | 1020 __ pop(ltos); |
871 __ testq(rax, rax); | 1021 __ testptr(rax, rax); |
872 __ jcc(Assembler::zero, store_result); | 1022 __ jcc(Assembler::zero, store_result); |
873 __ movq(rax, Address(rax, 0)); | 1023 __ movptr(rax, Address(rax, 0)); |
874 __ bind(store_result); | 1024 __ bind(store_result); |
875 __ movq(Address(rbp, frame::interpreter_frame_oop_temp_offset*wordSize), rax); | 1025 __ movptr(Address(rbp, frame::interpreter_frame_oop_temp_offset*wordSize), rax); |
876 // keep stack depth as expected by pushing oop which will eventually be discarde | 1026 // keep stack depth as expected by pushing oop which will eventually be discarde |
877 __ push(ltos); | 1027 __ push(ltos); |
878 __ bind(no_oop); | 1028 __ bind(no_oop); |
879 } | 1029 } |
880 | 1030 |
883 Label no_reguard; | 1033 Label no_reguard; |
884 __ cmpl(Address(r15_thread, JavaThread::stack_guard_state_offset()), | 1034 __ cmpl(Address(r15_thread, JavaThread::stack_guard_state_offset()), |
885 JavaThread::stack_guard_yellow_disabled); | 1035 JavaThread::stack_guard_yellow_disabled); |
886 __ jcc(Assembler::notEqual, no_reguard); | 1036 __ jcc(Assembler::notEqual, no_reguard); |
887 | 1037 |
888 __ pushaq(); // XXX only save smashed registers | 1038 __ pusha(); // XXX only save smashed registers |
889 __ movq(r12, rsp); // remember sp | 1039 __ mov(r12, rsp); // remember sp |
890 __ subq(rsp, frame::arg_reg_save_area_bytes); // windows | 1040 __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows |
891 __ andq(rsp, -16); // align stack as required by ABI | 1041 __ andptr(rsp, -16); // align stack as required by ABI |
892 __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::reguard_yellow_pages))); | 1042 __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::reguard_yellow_pages))); |
893 __ movq(rsp, r12); // restore sp | 1043 __ mov(rsp, r12); // restore sp |
894 __ popaq(); // XXX only restore smashed registers | 1044 __ popa(); // XXX only restore smashed registers |
895 __ reinit_heapbase(); | 1045 __ reinit_heapbase(); |
896 | 1046 |
897 __ bind(no_reguard); | 1047 __ bind(no_reguard); |
898 } | 1048 } |
899 | 1049 |
904 __ get_method(method); | 1054 __ get_method(method); |
905 __ verify_oop(method); | 1055 __ verify_oop(method); |
906 | 1056 |
907 // restore r13 to have legal interpreter frame, i.e., bci == 0 <=> | 1057 // restore r13 to have legal interpreter frame, i.e., bci == 0 <=> |
908 // r13 == code_base() | 1058 // r13 == code_base() |
909 __ movq(r13, Address(method, methodOopDesc::const_offset())); // get constMethodOop | 1059 __ movptr(r13, Address(method, methodOopDesc::const_offset())); // get constMethodOop |
910 __ leaq(r13, Address(r13, constMethodOopDesc::codes_offset())); // get codebase | 1060 __ lea(r13, Address(r13, constMethodOopDesc::codes_offset())); // get codebase |
911 // handle exceptions (exception handling will handle unlocking!) | 1061 // handle exceptions (exception handling will handle unlocking!) |
912 { | 1062 { |
913 Label L; | 1063 Label L; |
914 __ cmpq(Address(r15_thread, Thread::pending_exception_offset()), (int) NULL); | 1064 __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t) NULL_WORD); |
915 __ jcc(Assembler::zero, L); | 1065 __ jcc(Assembler::zero, L); |
916 // Note: At some point we may want to unify this with the code | 1066 // Note: At some point we may want to unify this with the code |
917 // used in call_VM_base(); i.e., we should use the | 1067 // used in call_VM_base(); i.e., we should use the |
918 // StubRoutines::forward_exception code. For now this doesn't work | 1068 // StubRoutines::forward_exception code. For now this doesn't work |
919 // here because the rsp is not correctly set at this point. | 1069 // here because the rsp is not correctly set at this point. |
940 const Address monitor(rbp, | 1090 const Address monitor(rbp, |
941 (intptr_t)(frame::interpreter_frame_initial_sp_offset * | 1091 (intptr_t)(frame::interpreter_frame_initial_sp_offset * |
942 wordSize - sizeof(BasicObjectLock))); | 1092 wordSize - sizeof(BasicObjectLock))); |
943 | 1093 |
944 // monitor expect in c_rarg1 for slow unlock path | 1094 // monitor expect in c_rarg1 for slow unlock path |
945 __ leaq(c_rarg1, monitor); // address of first monitor | 1095 __ lea(c_rarg1, monitor); // address of first monitor |
946 | 1096 |
947 __ movq(t, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes())); | 1097 __ movptr(t, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes())); |
948 __ testq(t, t); | 1098 __ testptr(t, t); |
949 __ jcc(Assembler::notZero, unlock); | 1099 __ jcc(Assembler::notZero, unlock); |
950 | 1100 |
951 // Entry already unlocked, need to throw exception | 1101 // Entry already unlocked, need to throw exception |
952 __ MacroAssembler::call_VM(noreg, | 1102 __ MacroAssembler::call_VM(noreg, |
953 CAST_FROM_FN_PTR(address, | 1103 CAST_FROM_FN_PTR(address, |
971 // restore potential result in ST0 & handle result | 1121 // restore potential result in ST0 & handle result |
972 | 1122 |
973 __ pop(ltos); | 1123 __ pop(ltos); |
974 __ pop(dtos); | 1124 __ pop(dtos); |
975 | 1125 |
976 __ movq(t, Address(rbp, | 1126 __ movptr(t, Address(rbp, |
977 (frame::interpreter_frame_result_handler_offset) * wordSize)); | 1127 (frame::interpreter_frame_result_handler_offset) * wordSize)); |
978 __ call(t); | 1128 __ call(t); |
979 | 1129 |
980 // remove activation | 1130 // remove activation |
981 __ movq(t, Address(rbp, | 1131 __ movptr(t, Address(rbp, |
982 frame::interpreter_frame_sender_sp_offset * | 1132 frame::interpreter_frame_sender_sp_offset * |
983 wordSize)); // get sender sp | 1133 wordSize)); // get sender sp |
984 __ leave(); // remove frame anchor | 1134 __ leave(); // remove frame anchor |
985 __ popq(rdi); // get return address | 1135 __ pop(rdi); // get return address |
986 __ movq(rsp, t); // set sp to sender sp | 1136 __ mov(rsp, t); // set sp to sender sp |
987 __ jmp(rdi); | 1137 __ jmp(rdi); |
988 | 1138 |
989 if (inc_counter) { | 1139 if (inc_counter) { |
990 // Handle overflow of counter and compile method | 1140 // Handle overflow of counter and compile method |
991 __ bind(invocation_counter_overflow); | 1141 __ bind(invocation_counter_overflow); |
1030 | 1180 |
1031 // see if we've got enough room on the stack for locals plus overhead. | 1181 // see if we've got enough room on the stack for locals plus overhead. |
1032 generate_stack_overflow_check(); | 1182 generate_stack_overflow_check(); |
1033 | 1183 |
1034 // get return address | 1184 // get return address |
1035 __ popq(rax); | 1185 __ pop(rax); |
1036 | 1186 |
1037 // compute beginning of parameters (r14) | 1187 // compute beginning of parameters (r14) |
1038 if (TaggedStackInterpreter) __ shll(rcx, 1); // 2 slots per parameter. | 1188 if (TaggedStackInterpreter) __ shll(rcx, 1); // 2 slots per parameter. |
1039 __ leaq(r14, Address(rsp, rcx, Address::times_8, -wordSize)); | 1189 __ lea(r14, Address(rsp, rcx, Address::times_8, -wordSize)); |
1040 | 1190 |
1041 // rdx - # of additional locals | 1191 // rdx - # of additional locals |
1042 // allocate space for locals | 1192 // allocate space for locals |
1043 // explicitly initialize locals | 1193 // explicitly initialize locals |
1044 { | 1194 { |
1045 Label exit, loop; | 1195 Label exit, loop; |
1046 __ testl(rdx, rdx); | 1196 __ testl(rdx, rdx); |
1047 __ jcc(Assembler::lessEqual, exit); // do nothing if rdx <= 0 | 1197 __ jcc(Assembler::lessEqual, exit); // do nothing if rdx <= 0 |
1048 __ bind(loop); | 1198 __ bind(loop); |
1049 if (TaggedStackInterpreter) __ pushq((int) NULL); // push tag | 1199 if (TaggedStackInterpreter) __ push((int) NULL_WORD); // push tag |
1050 __ pushq((int) NULL); // initialize local variables | 1200 __ push((int) NULL_WORD); // initialize local variables |
1051 __ decrementl(rdx); // until everything initialized | 1201 __ decrementl(rdx); // until everything initialized |
1052 __ jcc(Assembler::greater, loop); | 1202 __ jcc(Assembler::greater, loop); |
1053 __ bind(exit); | 1203 __ bind(exit); |
1054 } | 1204 } |
1055 | 1205 |
1135 #ifdef ASSERT | 1285 #ifdef ASSERT |
1136 { | 1286 { |
1137 Label L; | 1287 Label L; |
1138 const Address monitor_block_top (rbp, | 1288 const Address monitor_block_top (rbp, |
1139 frame::interpreter_frame_monitor_block_top_offset * wordSize); | 1289 frame::interpreter_frame_monitor_block_top_offset * wordSize); |
1140 __ movq(rax, monitor_block_top); | 1290 __ movptr(rax, monitor_block_top); |
1141 __ cmpq(rax, rsp); | 1291 __ cmpptr(rax, rsp); |
1142 __ jcc(Assembler::equal, L); | 1292 __ jcc(Assembler::equal, L); |
1143 __ stop("broken stack frame setup in interpreter"); | 1293 __ stop("broken stack frame setup in interpreter"); |
1144 __ bind(L); | 1294 __ bind(L); |
1145 } | 1295 } |
1146 #endif | 1296 #endif |
1158 | 1308 |
1159 __ call_VM(noreg, | 1309 __ call_VM(noreg, |
1160 CAST_FROM_FN_PTR(address, InterpreterRuntime::profile_method), | 1310 CAST_FROM_FN_PTR(address, InterpreterRuntime::profile_method), |
1161 r13, true); | 1311 r13, true); |
1162 | 1312 |
1163 __ movq(rbx, Address(rbp, method_offset)); // restore methodOop | 1313 __ movptr(rbx, Address(rbp, method_offset)); // restore methodOop |
1164 __ movq(rax, Address(rbx, | 1314 __ movptr(rax, Address(rbx, |
1165 in_bytes(methodOopDesc::method_data_offset()))); | 1315 in_bytes(methodOopDesc::method_data_offset()))); |
1166 __ movq(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize), | 1316 __ movptr(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize), |
1167 rax); | 1317 rax); |
1168 __ test_method_data_pointer(rax, profile_method_continue); | 1318 __ test_method_data_pointer(rax, profile_method_continue); |
1169 __ addq(rax, in_bytes(methodDataOopDesc::data_offset())); | 1319 __ addptr(rax, in_bytes(methodDataOopDesc::data_offset())); |
1170 __ movq(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize), | 1320 __ movptr(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize), |
1171 rax); | 1321 rax); |
1172 __ jmp(profile_method_continue); | 1322 __ jmp(profile_method_continue); |
1173 } | 1323 } |
1174 // Handle overflow of counter and compile method | 1324 // Handle overflow of counter and compile method |
1175 __ bind(invocation_counter_overflow); | 1325 __ bind(invocation_counter_overflow); |
1355 // Entry point in previous activation (i.e., if the caller was | 1505 // Entry point in previous activation (i.e., if the caller was |
1356 // interpreted) | 1506 // interpreted) |
1357 Interpreter::_rethrow_exception_entry = __ pc(); | 1507 Interpreter::_rethrow_exception_entry = __ pc(); |
1358 // Restore sp to interpreter_frame_last_sp even though we are going | 1508 // Restore sp to interpreter_frame_last_sp even though we are going |
1359 // to empty the expression stack for the exception processing. | 1509 // to empty the expression stack for the exception processing. |
1360 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD); | 1510 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), (int32_t)NULL_WORD); |
1361 // rax: exception | 1511 // rax: exception |
1362 // rdx: return address/pc that threw exception | 1512 // rdx: return address/pc that threw exception |
1363 __ restore_bcp(); // r13 points to call/send | 1513 __ restore_bcp(); // r13 points to call/send |
1364 __ restore_locals(); | 1514 __ restore_locals(); |
1365 __ reinit_heapbase(); // restore r12 as heapbase. | 1515 __ reinit_heapbase(); // restore r12 as heapbase. |
1367 Interpreter::_throw_exception_entry = __ pc(); | 1517 Interpreter::_throw_exception_entry = __ pc(); |
1368 // expression stack is undefined here | 1518 // expression stack is undefined here |
1369 // rax: exception | 1519 // rax: exception |
1370 // r13: exception bcp | 1520 // r13: exception bcp |
1371 __ verify_oop(rax); | 1521 __ verify_oop(rax); |
1372 __ movq(c_rarg1, rax); | 1522 __ mov(c_rarg1, rax); |
1373 | 1523 |
1374 // expression stack must be empty before entering the VM in case of | 1524 // expression stack must be empty before entering the VM in case of |
1375 // an exception | 1525 // an exception |
1376 __ empty_expression_stack(); | 1526 __ empty_expression_stack(); |
1377 // find exception handler address and preserve exception oop | 1527 // find exception handler address and preserve exception oop |
1422 // | 1572 // |
1423 // Note that we don't compare the return PC against the | 1573 // Note that we don't compare the return PC against the |
1424 // deoptimization blob's unpack entry because of the presence of | 1574 // deoptimization blob's unpack entry because of the presence of |
1425 // adapter frames in C2. | 1575 // adapter frames in C2. |
1426 Label caller_not_deoptimized; | 1576 Label caller_not_deoptimized; |
1427 __ movq(c_rarg1, Address(rbp, frame::return_addr_offset * wordSize)); | 1577 __ movptr(c_rarg1, Address(rbp, frame::return_addr_offset * wordSize)); |
1428 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, | 1578 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, |
1429 InterpreterRuntime::interpreter_contains), c_rarg1); | 1579 InterpreterRuntime::interpreter_contains), c_rarg1); |
1430 __ testl(rax, rax); | 1580 __ testl(rax, rax); |
1431 __ jcc(Assembler::notZero, caller_not_deoptimized); | 1581 __ jcc(Assembler::notZero, caller_not_deoptimized); |
1432 | 1582 |
1435 __ get_method(rax); | 1585 __ get_method(rax); |
1436 __ load_unsigned_word(rax, Address(rax, in_bytes(methodOopDesc:: | 1586 __ load_unsigned_word(rax, Address(rax, in_bytes(methodOopDesc:: |
1437 size_of_parameters_offset()))); | 1587 size_of_parameters_offset()))); |
1438 __ shll(rax, Interpreter::logStackElementSize()); | 1588 __ shll(rax, Interpreter::logStackElementSize()); |
1439 __ restore_locals(); // XXX do we need this? | 1589 __ restore_locals(); // XXX do we need this? |
1440 __ subq(r14, rax); | 1590 __ subptr(r14, rax); |
1441 __ addq(r14, wordSize); | 1591 __ addptr(r14, wordSize); |
1442 // Save these arguments | 1592 // Save these arguments |
1443 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, | 1593 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, |
1444 Deoptimization:: | 1594 Deoptimization:: |
1445 popframe_preserve_args), | 1595 popframe_preserve_args), |
1446 r15_thread, rax, r14); | 1596 r15_thread, rax, r14); |
1475 // no space between the top of the expression stack (current | 1625 // no space between the top of the expression stack (current |
1476 // last_sp) and the top of stack. Rather than force deopt to | 1626 // last_sp) and the top of stack. Rather than force deopt to |
1477 // maintain this kind of invariant all the time we call a small | 1627 // maintain this kind of invariant all the time we call a small |
1478 // fixup routine to move the mutated arguments onto the top of our | 1628 // fixup routine to move the mutated arguments onto the top of our |
1479 // expression stack if necessary. | 1629 // expression stack if necessary. |
1480 __ movq(c_rarg1, rsp); | 1630 __ mov(c_rarg1, rsp); |
1481 __ movq(c_rarg2, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize)); | 1631 __ movptr(c_rarg2, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize)); |
1482 // PC must point into interpreter here | 1632 // PC must point into interpreter here |
1483 __ set_last_Java_frame(noreg, rbp, __ pc()); | 1633 __ set_last_Java_frame(noreg, rbp, __ pc()); |
1484 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::popframe_move_outgoing_args), r15_thread, c_rarg1, c_rarg2); | 1634 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::popframe_move_outgoing_args), r15_thread, c_rarg1, c_rarg2); |
1485 __ reset_last_Java_frame(true, true); | 1635 __ reset_last_Java_frame(true, true); |
1486 // Restore the last_sp and null it out | 1636 // Restore the last_sp and null it out |
1487 __ movq(rsp, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize)); | 1637 __ movptr(rsp, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize)); |
1488 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD); | 1638 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), (int32_t)NULL_WORD); |
1489 | 1639 |
1490 __ restore_bcp(); // XXX do we need this? | 1640 __ restore_bcp(); // XXX do we need this? |
1491 __ restore_locals(); // XXX do we need this? | 1641 __ restore_locals(); // XXX do we need this? |
1492 // The method data pointer was incremented already during | 1642 // The method data pointer was incremented already during |
1493 // call profiling. We have to restore the mdp for the current bcp. | 1643 // call profiling. We have to restore the mdp for the current bcp. |
1504 | 1654 |
1505 Interpreter::_remove_activation_entry = __ pc(); | 1655 Interpreter::_remove_activation_entry = __ pc(); |
1506 | 1656 |
1507 // preserve exception over this code sequence | 1657 // preserve exception over this code sequence |
1508 __ pop_ptr(rax); | 1658 __ pop_ptr(rax); |
1509 __ movq(Address(r15_thread, JavaThread::vm_result_offset()), rax); | 1659 __ movptr(Address(r15_thread, JavaThread::vm_result_offset()), rax); |
1510 // remove the activation (without doing throws on illegalMonitorExceptions) | 1660 // remove the activation (without doing throws on illegalMonitorExceptions) |
1511 __ remove_activation(vtos, rdx, false, true, false); | 1661 __ remove_activation(vtos, rdx, false, true, false); |
1512 // restore exception | 1662 // restore exception |
1513 __ movq(rax, Address(r15_thread, JavaThread::vm_result_offset())); | 1663 __ movptr(rax, Address(r15_thread, JavaThread::vm_result_offset())); |
1514 __ movptr(Address(r15_thread, JavaThread::vm_result_offset()), NULL_WORD); | 1664 __ movptr(Address(r15_thread, JavaThread::vm_result_offset()), (int32_t)NULL_WORD); |
1515 __ verify_oop(rax); | 1665 __ verify_oop(rax); |
1516 | 1666 |
1517 // In between activations - previous activation type unknown yet | 1667 // In between activations - previous activation type unknown yet |
1518 // compute continuation point - the continuation point expects the | 1668 // compute continuation point - the continuation point expects the |
1519 // following registers set up: | 1669 // following registers set up: |
1520 // | 1670 // |
1521 // rax: exception | 1671 // rax: exception |
1522 // rdx: return address/pc that threw exception | 1672 // rdx: return address/pc that threw exception |
1523 // rsp: expression stack of caller | 1673 // rsp: expression stack of caller |
1524 // rbp: ebp of caller | 1674 // rbp: ebp of caller |
1525 __ pushq(rax); // save exception | 1675 __ push(rax); // save exception |
1526 __ pushq(rdx); // save return address | 1676 __ push(rdx); // save return address |
1527 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, | 1677 __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, |
1528 SharedRuntime::exception_handler_for_return_address), | 1678 SharedRuntime::exception_handler_for_return_address), |
1529 rdx); | 1679 rdx); |
1530 __ movq(rbx, rax); // save exception handler | 1680 __ mov(rbx, rax); // save exception handler |
1531 __ popq(rdx); // restore return address | 1681 __ pop(rdx); // restore return address |
1532 __ popq(rax); // restore exception | 1682 __ pop(rax); // restore exception |
1533 // Note that an "issuing PC" is actually the next PC after the call | 1683 // Note that an "issuing PC" is actually the next PC after the call |
1534 __ jmp(rbx); // jump to exception | 1684 __ jmp(rbx); // jump to exception |
1535 // handler of caller | 1685 // handler of caller |
1536 } | 1686 } |
1537 | 1687 |
1545 __ restore_bcp(); | 1695 __ restore_bcp(); |
1546 __ restore_locals(); | 1696 __ restore_locals(); |
1547 __ empty_expression_stack(); | 1697 __ empty_expression_stack(); |
1548 __ load_earlyret_value(state); | 1698 __ load_earlyret_value(state); |
1549 | 1699 |
1550 __ movq(rdx, Address(r15_thread, JavaThread::jvmti_thread_state_offset())); | 1700 __ movptr(rdx, Address(r15_thread, JavaThread::jvmti_thread_state_offset())); |
1551 Address cond_addr(rdx, JvmtiThreadState::earlyret_state_offset()); | 1701 Address cond_addr(rdx, JvmtiThreadState::earlyret_state_offset()); |
1552 | 1702 |
1553 // Clear the earlyret state | 1703 // Clear the earlyret state |
1554 __ movl(cond_addr, JvmtiThreadState::earlyret_inactive); | 1704 __ movl(cond_addr, JvmtiThreadState::earlyret_inactive); |
1555 | 1705 |
1607 #ifndef PRODUCT | 1757 #ifndef PRODUCT |
1608 address TemplateInterpreterGenerator::generate_trace_code(TosState state) { | 1758 address TemplateInterpreterGenerator::generate_trace_code(TosState state) { |
1609 address entry = __ pc(); | 1759 address entry = __ pc(); |
1610 | 1760 |
1611 __ push(state); | 1761 __ push(state); |
1612 __ pushq(c_rarg0); | 1762 __ push(c_rarg0); |
1613 __ pushq(c_rarg1); | 1763 __ push(c_rarg1); |
1614 __ pushq(c_rarg2); | 1764 __ push(c_rarg2); |
1615 __ pushq(c_rarg3); | 1765 __ push(c_rarg3); |
1616 __ movq(c_rarg2, rax); // Pass itos | 1766 __ mov(c_rarg2, rax); // Pass itos |
1617 #ifdef _WIN64 | 1767 #ifdef _WIN64 |
1618 __ movflt(xmm3, xmm0); // Pass ftos | 1768 __ movflt(xmm3, xmm0); // Pass ftos |
1619 #endif | 1769 #endif |
1620 __ call_VM(noreg, | 1770 __ call_VM(noreg, |
1621 CAST_FROM_FN_PTR(address, SharedRuntime::trace_bytecode), | 1771 CAST_FROM_FN_PTR(address, SharedRuntime::trace_bytecode), |
1622 c_rarg1, c_rarg2, c_rarg3); | 1772 c_rarg1, c_rarg2, c_rarg3); |
1623 __ popq(c_rarg3); | 1773 __ pop(c_rarg3); |
1624 __ popq(c_rarg2); | 1774 __ pop(c_rarg2); |
1625 __ popq(c_rarg1); | 1775 __ pop(c_rarg1); |
1626 __ popq(c_rarg0); | 1776 __ pop(c_rarg0); |
1627 __ pop(state); | 1777 __ pop(state); |
1628 __ ret(0); // return from result handler | 1778 __ ret(0); // return from result handler |
1629 | 1779 |
1630 return entry; | 1780 return entry; |
1631 } | 1781 } |
1655 // The run-time runtime saves the right registers, depending on | 1805 // The run-time runtime saves the right registers, depending on |
1656 // the tosca in-state for the given template. | 1806 // the tosca in-state for the given template. |
1657 | 1807 |
1658 assert(Interpreter::trace_code(t->tos_in()) != NULL, | 1808 assert(Interpreter::trace_code(t->tos_in()) != NULL, |
1659 "entry must have been generated"); | 1809 "entry must have been generated"); |
1660 __ movq(r12, rsp); // remember sp | 1810 __ mov(r12, rsp); // remember sp |
1661 __ andq(rsp, -16); // align stack as required by ABI | 1811 __ andptr(rsp, -16); // align stack as required by ABI |
1662 __ call(RuntimeAddress(Interpreter::trace_code(t->tos_in()))); | 1812 __ call(RuntimeAddress(Interpreter::trace_code(t->tos_in()))); |
1663 __ movq(rsp, r12); // restore sp | 1813 __ mov(rsp, r12); // restore sp |
1664 __ reinit_heapbase(); | 1814 __ reinit_heapbase(); |
1665 } | 1815 } |
1666 | 1816 |
1667 | 1817 |
1668 void TemplateInterpreterGenerator::stop_interpreter_at() { | 1818 void TemplateInterpreterGenerator::stop_interpreter_at() { |
1672 __ jcc(Assembler::notEqual, L); | 1822 __ jcc(Assembler::notEqual, L); |
1673 __ int3(); | 1823 __ int3(); |
1674 __ bind(L); | 1824 __ bind(L); |
1675 } | 1825 } |
1676 #endif // !PRODUCT | 1826 #endif // !PRODUCT |
1827 #endif // ! CC_INTERP |