comparison src/cpu/x86/vm/x86.ad @ 17809:a433eb716ce1

8037821: Account for trampoline stubs when estimating code buffer sizes Summary: Take into account space needed for "trampoline code" used by calls on PPC64. Reviewed-by: kvn Contributed-by: lutz.schmidt@sap.com
author goetz
date Tue, 25 Mar 2014 12:54:21 -0700
parents 04d32e7fad07
children 52b4284cb496
comparison
equal deleted inserted replaced
17808:d623bc507723 17809:a433eb716ce1
472 #endif 472 #endif
473 ); 473 );
474 474
475 %} 475 %}
476 476
477
478 //----------SOURCE BLOCK-------------------------------------------------------
479 // This is a block of C++ code which provides values, functions, and
480 // definitions necessary in the rest of the architecture description
481
482 source_hpp %{
483 // Header information of the source block.
484 // Method declarations/definitions which are used outside
485 // the ad-scope can conveniently be defined here.
486 //
487 // To keep related declarations/definitions/uses close together,
488 // we switch between source %{ %} and source_hpp %{ %} freely as needed.
489
class CallStubImpl {

  //--------------------------------------------------------------
  //---< Used for optimization in Compile::shorten_branches >---
  //--------------------------------------------------------------

 public:
  // x86 reaches call targets directly, so no trampoline stubs are ever
  // emitted; both estimates below are therefore zero.

  // Size (in bytes) a call trampoline stub would occupy.
  static uint size_call_trampoline() {
    return 0; // no call trampolines on this platform
  }

  // Number of relocation entries a call trampoline stub would need.
  static uint reloc_call_trampoline() {
    return 0; // no call trampolines on this platform
  }
};
507
class HandlerImpl {

 public:

  // Emitters are defined in the source %{ %} block below; they return the
  // offset of the generated handler in the stub section (0 on failure).
  static int emit_exception_handler(CodeBuffer &cbuf);
  static int emit_deopt_handler(CodeBuffer& cbuf);

  // Worst-case size of the code emitted by emit_exception_handler().
  static uint size_exception_handler() {
    // NativeCall instruction size is the same as NativeJump.
    // The exception handler starts out as a jump and can be patched to
    // a call by deoptimization. (4932387)
    // Note that this value is also credited (in output.cpp) to
    // the size of the code section.
    return NativeJump::instruction_size;
  }

#ifdef _LP64
  // Worst-case size of the code emitted by emit_deopt_handler():
  // call + subptr adjust + jump, conservatively three 5 byte instructions.
  static uint size_deopt_handler() {
    // three 5 byte instructions
    return 15;
  }
#else
  // Worst-case size of the code emitted by emit_deopt_handler():
  // a pushl of the current pc followed by a jump to the deopt blob.
  static uint size_deopt_handler() {
    // NativeCall instruction size is the same as NativeJump.
    // The deopt handler starts out as a jump and can be patched to
    // a call by deoptimization. (4932387)
    // Note that this value is also credited (in output.cpp) to
    // the size of the code section.
    return 5 + NativeJump::instruction_size; // pushl(); jmp;
  }
#endif
};
540
541 %} // end source_hpp
542
477 source %{ 543 source %{
544
// Emit exception handler code.
// Stuff framesize into a register and call a VM stub routine.
// Returns the offset of the handler within the stub section, or 0 if the
// code buffer could not be expanded to hold it.
int HandlerImpl::emit_exception_handler(CodeBuffer& cbuf) {

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a handler.
  MacroAssembler _masm(&cbuf);
  address base = __ start_a_stub(size_exception_handler());
  if (base == NULL) return 0; // CodeBuffer::expand failed
  int offset = __ offset();
  // A single jump to the shared exception blob; per size_exception_handler()
  // this jump may later be patched into a call by deoptimization.
  __ jump(RuntimeAddress(OptoRuntime::exception_blob()->entry_point()));
  // Emitted size must not exceed the estimate credited to the code section.
  assert(__ offset() - offset <= (int) size_exception_handler(), "overflow");
  __ end_a_stub();
  return offset;
}
560
// Emit deopt handler code.
// Pushes the pc of the handler itself, then jumps to the deopt blob's
// unpack entry. Returns the offset of the handler within the stub
// section, or 0 if the code buffer could not be expanded to hold it.
int HandlerImpl::emit_deopt_handler(CodeBuffer& cbuf) {

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a handler.
  MacroAssembler _masm(&cbuf);
  address base = __ start_a_stub(size_deopt_handler());
  if (base == NULL) return 0; // CodeBuffer::expand failed
  int offset = __ offset();

#ifdef _LP64
  // NOTE(review): the_pc is only referenced by the comments below;
  // it documents the address the pushed value is adjusted to match.
  address the_pc = (address) __ pc();
  Label next;
  // push a "the_pc" on the stack without destroying any registers
  // as they all may be live.

  // push address of "next"
  __ call(next, relocInfo::none); // reloc none is fine since it is a disp32
  __ bind(next);
  // adjust it so it matches "the_pc": subtract the size of the code
  // emitted since the stub start (the call itself) from the pushed value.
  __ subptr(Address(rsp, 0), __ offset() - offset);
#else
  // 32-bit: the pc can be pushed directly as an immediate.
  InternalAddress here(__ pc());
  __ pushptr(here.addr());
#endif

  __ jump(RuntimeAddress(SharedRuntime::deopt_blob()->unpack()));
  // Emitted size must not exceed the estimate credited to the code section.
  assert(__ offset() - offset <= (int) size_deopt_handler(), "overflow");
  __ end_a_stub();
  return offset;
}
592
593
594 //=============================================================================
595
478 // Float masks come from different places depending on platform. 596 // Float masks come from different places depending on platform.
479 #ifdef _LP64 597 #ifdef _LP64
480 static address float_signmask() { return StubRoutines::x86::float_sign_mask(); } 598 static address float_signmask() { return StubRoutines::x86::float_sign_mask(); }
481 static address float_signflip() { return StubRoutines::x86::float_sign_flip(); } 599 static address float_signflip() { return StubRoutines::x86::float_sign_flip(); }
482 static address double_signmask() { return StubRoutines::x86::double_sign_mask(); } 600 static address double_signmask() { return StubRoutines::x86::double_sign_mask(); }