comparison src/cpu/sparc/vm/sparc.ad @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 8c92982cbbc4
children 7eca5de9e0b6
comparing 6724:36d1d483d5d6 with 6725:da91efe96a93
@@ -547,19 +547,19 @@
 
 int MachCallDynamicJavaNode::ret_addr_offset() {
   int vtable_index = this->_vtable_index;
   if (vtable_index < 0) {
     // must be invalid_vtable_index, not nonvirtual_vtable_index
-    assert(vtable_index == methodOopDesc::invalid_vtable_index, "correct sentinel value");
+    assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value");
     return (NativeMovConstReg::instruction_size +
            NativeCall::instruction_size);  // sethi; setlo; call; delay slot
   } else {
     assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
-    int entry_offset = instanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();
+    int entry_offset = InstanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();
     int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes();
     int klass_load_size;
-    if (UseCompressedOops) {
+    if (UseCompressedOops && UseCompressedKlassPointers) {
       assert(Universe::heap() != NULL, "java heap should be initialized");
       if (Universe::narrow_oop_base() == NULL)
         klass_load_size = 2*BytesPerInstWord; // see MacroAssembler::load_klass()
       else
         klass_load_size = 3*BytesPerInstWord;
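The size accounting in this hunk mirrors MacroAssembler::load_klass(): decoding a compressed Klass* costs two instructions when the encoding base is zero and three when a non-zero base must be added back, versus a single ld_ptr for an uncompressed pointer. A minimal C++ sketch of that arithmetic, assuming only SPARC's fixed 4-byte instruction word (the helper name is invented, not HotSpot code):

    #include <cassert>

    const int BytesPerInstWord = 4;   // every SPARC instruction is 4 bytes wide

    // Hypothetical helper mirroring the branch structure above.
    int klass_load_size_bytes(bool compressed_klass, bool zero_based) {
      if (compressed_klass) {
        // load + decode of the narrow Klass* when the base is zero;
        // one extra instruction to add the base back when it is not.
        return (zero_based ? 2 : 3) * BytesPerInstWord;
      }
      return 1 * BytesPerInstWord;    // a single 64-bit ld_ptr of the Klass*
    }

    int main() {
      assert(klass_load_size_bytes(true,  true)  ==  8);
      assert(klass_load_size_bytes(true,  false) == 12);
      assert(klass_load_size_bytes(false, false) ==  4);
      return 0;
    }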
@@ -1674,11 +1674,11 @@
   if (base == NULL) return;  // CodeBuffer::expand failed
 
   // static stub relocation stores the instruction address of the call
   __ relocate(static_stub_Relocation::spec(mark));
 
-  __ set_oop(NULL, reg_to_register_object(Matcher::inline_cache_reg_encode()));
+  __ set_metadata(NULL, reg_to_register_object(Matcher::inline_cache_reg_encode()));
 
   __ set_inst_mark();
   AddressLiteral addrlit(-1);
   __ JUMP(addrlit, G3, 0);
 
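The stub here deliberately loads a null constant and jumps to -1: both are placeholders that get patched when the call site is resolved, and the change from set_oop to set_metadata reflects that the patched-in value is now a Method* in metaspace rather than a methodOop in the heap. A toy model of that patch-up step, under stated assumptions (the struct and function are stand-ins, not HotSpot's NativeMovConstReg/NativeJump API):

    #include <cstdint>

    // Toy model of a to-interpreter static call stub: a constant load of a
    // Method* followed by a jump. Both slots are emitted as placeholders
    // (NULL and -1 above) and patched when the call site is resolved.
    struct StaticStub {
      const void* method;     // slot reserved by set_metadata(NULL, ...)
      uintptr_t   jump_dest;  // slot reserved by JUMP(addrlit /* -1 */, G3, 0)
    };

    void resolve_static_call(StaticStub* stub, const void* method, uintptr_t entry) {
      stub->method    = method;  // callee Method*; a c2i adapter expects it in G5
      stub->jump_dest = entry;   // real destination replaces the -1 placeholder
    }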
@@ -1849,22 +1849,10 @@
 }
 
 #ifdef ASSERT
 address last_rethrow = NULL;  // debugging aid for Rethrow encoding
 #endif
-
-// Map Types to machine register types
-const int Matcher::base2reg[Type::lastype] = {
-  Node::NotAMachineReg,0,0, Op_RegI, Op_RegL, 0, Op_RegN,
-  Node::NotAMachineReg, Node::NotAMachineReg, /* tuple, array */
-  0, Op_RegD, 0, 0, /* Vectors */
-  Op_RegP, Op_RegP, Op_RegP, Op_RegP, Op_RegP, Op_RegP, /* the pointers */
-  0, 0/*abio*/,
-  Op_RegP /* Return address */, 0, /* the memories */
-  Op_RegF, Op_RegF, Op_RegF, Op_RegD, Op_RegD, Op_RegD,
-  0 /*bottom*/
-};
 
 // Vector width in bytes
 const int Matcher::vector_width_in_bytes(BasicType bt) {
   assert(MaxVectorSize == 8, "");
   return 8;
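For orientation, the deleted Matcher::base2reg table implemented a one-step mapping from an ideal Type tag to a machine register class by plain array indexing; this hunk only shows the platform copy being dropped. An invented miniature of the same lookup pattern (these enums are toys, not HotSpot's Type::lastype or Op_Reg* constants):

    // The ideal type tag indexes straight into a constant table.
    enum ToyType     { TY_INT, TY_LONG, TY_PTR, TY_FLOAT, TY_DOUBLE, TY_LAST };
    enum ToyRegClass { RC_NONE, RC_I, RC_L, RC_P, RC_F, RC_D };

    static const ToyRegClass toy_base2reg[TY_LAST] = {
      RC_I, RC_L, RC_P, RC_F, RC_D    // indexed by ToyType
    };

    ToyRegClass reg_class_for(ToyType t) { return toy_base2reg[t]; }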
@@ -2588,42 +2576,33 @@
     __ set_inst_mark();
     int vtable_index = this->_vtable_index;
     // MachCallDynamicJavaNode::ret_addr_offset uses this same test
     if (vtable_index < 0) {
       // must be invalid_vtable_index, not nonvirtual_vtable_index
-      assert(vtable_index == methodOopDesc::invalid_vtable_index, "correct sentinel value");
+      assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value");
       Register G5_ic_reg = reg_to_register_object(Matcher::inline_cache_reg_encode());
       assert(G5_ic_reg == G5_inline_cache_reg, "G5_inline_cache_reg used in assemble_ic_buffer_code()");
       assert(G5_ic_reg == G5_megamorphic_method, "G5_megamorphic_method used in megamorphic call stub");
-      // !!!!!
-      // Generate "set 0x01, R_G5", placeholder instruction to load oop-info
-      // emit_call_dynamic_prologue( cbuf );
-      __ set_oop((jobject)Universe::non_oop_word(), G5_ic_reg);
-
-      address virtual_call_oop_addr = __ inst_mark();
-      // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
-      // who we intended to call.
-      __ relocate(virtual_call_Relocation::spec(virtual_call_oop_addr));
-      emit_call_reloc(cbuf, $meth$$method, relocInfo::none);
+      __ ic_call((address)$meth$$method);
     } else {
       assert(!UseInlineCaches, "expect vtable calls only if not using ICs");
       // Just go thru the vtable
       // get receiver klass (receiver already checked for non-null)
       // If we end up going thru a c2i adapter interpreter expects method in G5
       int off = __ offset();
       __ load_klass(O0, G3_scratch);
       int klass_load_size;
-      if (UseCompressedOops) {
+      if (UseCompressedOops && UseCompressedKlassPointers) {
        assert(Universe::heap() != NULL, "java heap should be initialized");
        if (Universe::narrow_oop_base() == NULL)
          klass_load_size = 2*BytesPerInstWord;
        else
          klass_load_size = 3*BytesPerInstWord;
      } else {
        klass_load_size = 1*BytesPerInstWord;
      }
-      int entry_offset = instanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();
+      int entry_offset = InstanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();
       int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes();
       if (Assembler::is_simm13(v_off)) {
         __ ld_ptr(G3, v_off, G5_method);
       } else {
         // Generate 2 instructions
@@ -2635,11 +2614,11 @@
         __ ld_ptr(G3, G5_method, G5_method);
       }
       // NOTE: for vtable dispatches, the vtable entry will never be null.
       // However it may very well end up in handle_wrong_method if the
       // method is abstract for the particular class.
-      __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_compiled_offset()), G3_scratch);
+      __ ld_ptr(G5_method, in_bytes(Method::from_compiled_offset()), G3_scratch);
       // jump to target (either compiled code or c2iadapter)
       __ jmpl(G3_scratch, G0, O7);
       __ delayed()->nop();
     }
   %}
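The vtable branch computes the entry's byte offset and loads with a single ld_ptr only when that offset fits SPARC's 13-bit signed immediate; otherwise it must "Generate 2 instructions" to materialize the offset first. A small runnable sketch of the arithmetic (the layout constants below are assumptions for illustration, not the real InstanceKlass values; only the shape of the computation matches the encoding above):

    #include <cstdio>

    const int wordSize               = 8;   // 64-bit SPARC
    const int vtable_start_offset    = 28;  // words from Klass* to vtable[0] (assumed)
    const int vtable_entry_size      = 1;   // words per vtableEntry (assumed)
    const int method_offset_in_bytes = 0;   // Method* offset inside an entry (assumed)

    bool is_simm13(int x) { return -4096 <= x && x <= 4095; }

    int main() {
      int vtable_index = 5;
      int entry_offset = vtable_start_offset + vtable_index * vtable_entry_size;
      int v_off = entry_offset * wordSize + method_offset_in_bytes;
      // Fits simm13: one ld_ptr(G3, v_off, G5_method). Otherwise: materialize
      // v_off first, then ld_ptr(G3, G5_method, G5_method) as in the hunk.
      printf("v_off = %d, single-instruction load: %s\n",
             v_off, is_simm13(v_off) ? "yes" : "no");
      return 0;
    }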
@@ -2651,11 +2630,11 @@
     Register temp_reg = G3;   // caller must kill G3!  We cannot reuse G5_ic_reg here because
                               // we might be calling a C2I adapter which needs it.
 
     assert(temp_reg != G5_ic_reg, "conflicting registers");
     // Load nmethod
-    __ ld_ptr(G5_ic_reg, in_bytes(methodOopDesc::from_compiled_offset()), temp_reg);
+    __ ld_ptr(G5_ic_reg, in_bytes(Method::from_compiled_offset()), temp_reg);
 
     // CALL to compiled java, indirect the contents of G3
     __ set_inst_mark();
     __ callr(temp_reg, G0);
     __ delayed()->nop();
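Both this encoding and the vtable path above end at the method's "from compiled" entry: a word in the Method* holding either compiled code or a c2i adapter. A hypothetical stand-in for that double indirection (the types are invented; the comments map back to the instructions in the hunk):

    // Read the callee's entry out of its Method* (the ld_ptr into temp_reg),
    // then call through it (callr). When the callee has no compiled code the
    // entry is a c2i adapter, which is why the Method* must stay live in G5
    // and why the caller must treat G3 as killed.
    typedef void (*entry_t)();

    struct ToyMethod {
      entry_t from_compiled_entry;  // compiled entry point or c2i adapter
    };

    void call_from_compiled(ToyMethod* callee /* think: G5_ic_reg */) {
      entry_t target = callee->from_compiled_entry;  // ld_ptr(G5, offset, G3)
      target();                                      // callr G3; delayed nop
    }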
@@ -3217,11 +3196,11 @@
   // What direction does stack grow in (assumed to be same for native & Java)
   stack_direction(TOWARDS_LOW);
 
   // These two registers define part of the calling convention
   // between compiled code and the interpreter.
-  inline_cache_reg(R_G5);                // Inline Cache Register or methodOop for I2C
+  inline_cache_reg(R_G5);                // Inline Cache Register or Method* for I2C
   interpreter_method_oop_reg(R_G5);      // Method Oop Register when calling interpreter
 
   // Optional: name the operand used by cisc-spilling to access [stack_pointer + offset]
   cisc_spilling_operand_name(indOffset);
 
@@ -6068,32 +6047,38 @@
 instruct loadConP(iRegP dst, immP con) %{
   match(Set dst con);
   ins_cost(DEFAULT_COST * 3/2);
   format %{ "SET $con,$dst\t!ptr" %}
   ins_encode %{
-    // [RGV] This next line should be generated from ADLC
-    if (_opnds[1]->constant_is_oop()) {
+    relocInfo::relocType constant_reloc = _opnds[1]->constant_reloc();
     intptr_t val = $con$$constant;
+    if (constant_reloc == relocInfo::oop_type) {
       __ set_oop_constant((jobject) val, $dst$$Register);
+    } else if (constant_reloc == relocInfo::metadata_type) {
+      __ set_metadata_constant((Metadata*)val, $dst$$Register);
     } else {  // non-oop pointers, e.g. card mark base, heap top
-      __ set($con$$constant, $dst$$Register);
+      assert(constant_reloc == relocInfo::none, "unexpected reloc type");
+      __ set(val, $dst$$Register);
     }
   %}
   ins_pipe(loadConP);
 %}
 #else
 instruct loadConP_set(iRegP dst, immP_set con) %{
   match(Set dst con);
   ins_cost(DEFAULT_COST * 3/2);
   format %{ "SET $con,$dst\t! ptr" %}
   ins_encode %{
-    // [RGV] This next line should be generated from ADLC
-    if (_opnds[1]->constant_is_oop()) {
+    relocInfo::relocType constant_reloc = _opnds[1]->constant_reloc();
     intptr_t val = $con$$constant;
+    if (constant_reloc == relocInfo::oop_type) {
       __ set_oop_constant((jobject) val, $dst$$Register);
+    } else if (constant_reloc == relocInfo::metadata_type) {
+      __ set_metadata_constant((Metadata*)val, $dst$$Register);
     } else {  // non-oop pointers, e.g. card mark base, heap top
-      __ set($con$$constant, $dst$$Register);
+      assert(constant_reloc == relocInfo::none, "unexpected reloc type");
+      __ set(val, $dst$$Register);
     }
   %}
   ins_pipe(loadConP);
 %}
 
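With class metadata moved out of the Java heap, a 64-bit pointer constant now has three cases instead of two, and each needs a different relocation so later phases can find and update the embedded value. A hedged sketch of that classification (the enum mirrors relocInfo's cases, but the function itself is illustrative, not HotSpot's API):

    enum ToyReloc { RELOC_NONE, RELOC_OOP, RELOC_METADATA };

    // Three-way split matching the rewritten encodings: oops need oop
    // relocations (the GC may move them), Metadata* needs metadata
    // relocations (it lives in metaspace, outside the heap), and raw
    // addresses such as the card table base or heap top need none.
    ToyReloc classify_ptr_constant(bool is_oop, bool is_metadata) {
      if (is_oop)      return RELOC_OOP;       // -> __ set_oop_constant(...)
      if (is_metadata) return RELOC_METADATA;  // -> __ set_metadata_constant(...)
      return RELOC_NONE;                       // -> __ set(val, reg)
    }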