Mercurial > hg > truffle
diff src/cpu/sparc/vm/sparc.ad @ 6725:da91efe96a93
6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
| author | coleenp |
|---|---|
| date | Sat, 01 Sep 2012 13:25:18 -0400 |
| parents | 8c92982cbbc4 |
| children | 7eca5de9e0b6 |
line wrap: on
line diff
--- a/src/cpu/sparc/vm/sparc.ad Fri Aug 31 16:39:35 2012 -0700 +++ b/src/cpu/sparc/vm/sparc.ad Sat Sep 01 13:25:18 2012 -0400 @@ -549,15 +549,15 @@ int vtable_index = this->_vtable_index; if (vtable_index < 0) { // must be invalid_vtable_index, not nonvirtual_vtable_index - assert(vtable_index == methodOopDesc::invalid_vtable_index, "correct sentinel value"); + assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value"); return (NativeMovConstReg::instruction_size + NativeCall::instruction_size); // sethi; setlo; call; delay slot } else { assert(!UseInlineCaches, "expect vtable calls only if not using ICs"); - int entry_offset = instanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size(); + int entry_offset = InstanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size(); int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes(); int klass_load_size; - if (UseCompressedOops) { + if (UseCompressedOops && UseCompressedKlassPointers) { assert(Universe::heap() != NULL, "java heap should be initialized"); if (Universe::narrow_oop_base() == NULL) klass_load_size = 2*BytesPerInstWord; // see MacroAssembler::load_klass() @@ -1676,7 +1676,7 @@ // static stub relocation stores the instruction address of the call __ relocate(static_stub_Relocation::spec(mark)); - __ set_oop(NULL, reg_to_register_object(Matcher::inline_cache_reg_encode())); + __ set_metadata(NULL, reg_to_register_object(Matcher::inline_cache_reg_encode())); __ set_inst_mark(); AddressLiteral addrlit(-1); @@ -1852,18 +1852,6 @@ address last_rethrow = NULL; // debugging aid for Rethrow encoding #endif -// Map Types to machine register types -const int Matcher::base2reg[Type::lastype] = { - Node::NotAMachineReg,0,0, Op_RegI, Op_RegL, 0, Op_RegN, - Node::NotAMachineReg, Node::NotAMachineReg, /* tuple, array */ - 0, Op_RegD, 0, 0, /* Vectors */ - Op_RegP, Op_RegP, Op_RegP, Op_RegP, Op_RegP, Op_RegP, /* the pointers */ - 0, 0/*abio*/, - Op_RegP /* Return 
address */, 0, /* the memories */ - Op_RegF, Op_RegF, Op_RegF, Op_RegD, Op_RegD, Op_RegD, - 0 /*bottom*/ -}; - // Vector width in bytes const int Matcher::vector_width_in_bytes(BasicType bt) { assert(MaxVectorSize == 8, ""); @@ -2590,20 +2578,11 @@ // MachCallDynamicJavaNode::ret_addr_offset uses this same test if (vtable_index < 0) { // must be invalid_vtable_index, not nonvirtual_vtable_index - assert(vtable_index == methodOopDesc::invalid_vtable_index, "correct sentinel value"); + assert(vtable_index == Method::invalid_vtable_index, "correct sentinel value"); Register G5_ic_reg = reg_to_register_object(Matcher::inline_cache_reg_encode()); assert(G5_ic_reg == G5_inline_cache_reg, "G5_inline_cache_reg used in assemble_ic_buffer_code()"); assert(G5_ic_reg == G5_megamorphic_method, "G5_megamorphic_method used in megamorphic call stub"); - // !!!!! - // Generate "set 0x01, R_G5", placeholder instruction to load oop-info - // emit_call_dynamic_prologue( cbuf ); - __ set_oop((jobject)Universe::non_oop_word(), G5_ic_reg); - - address virtual_call_oop_addr = __ inst_mark(); - // CALL to fixup routine. Fixup routine uses ScopeDesc info to determine - // who we intended to call. 
- __ relocate(virtual_call_Relocation::spec(virtual_call_oop_addr)); - emit_call_reloc(cbuf, $meth$$method, relocInfo::none); + __ ic_call((address)$meth$$method); } else { assert(!UseInlineCaches, "expect vtable calls only if not using ICs"); // Just go thru the vtable @@ -2612,7 +2591,7 @@ int off = __ offset(); __ load_klass(O0, G3_scratch); int klass_load_size; - if (UseCompressedOops) { + if (UseCompressedOops && UseCompressedKlassPointers) { assert(Universe::heap() != NULL, "java heap should be initialized"); if (Universe::narrow_oop_base() == NULL) klass_load_size = 2*BytesPerInstWord; @@ -2621,7 +2600,7 @@ } else { klass_load_size = 1*BytesPerInstWord; } - int entry_offset = instanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size(); + int entry_offset = InstanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size(); int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes(); if (Assembler::is_simm13(v_off)) { __ ld_ptr(G3, v_off, G5_method); @@ -2637,7 +2616,7 @@ // NOTE: for vtable dispatches, the vtable entry will never be null. // However it may very well end up in handle_wrong_method if the // method is abstract for the particular class. - __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_compiled_offset()), G3_scratch); + __ ld_ptr(G5_method, in_bytes(Method::from_compiled_offset()), G3_scratch); // jump to target (either compiled code or c2iadapter) __ jmpl(G3_scratch, G0, O7); __ delayed()->nop(); @@ -2653,7 +2632,7 @@ assert(temp_reg != G5_ic_reg, "conflicting registers"); // Load nmethod - __ ld_ptr(G5_ic_reg, in_bytes(methodOopDesc::from_compiled_offset()), temp_reg); + __ ld_ptr(G5_ic_reg, in_bytes(Method::from_compiled_offset()), temp_reg); // CALL to compiled java, indirect the contents of G3 __ set_inst_mark(); @@ -3219,7 +3198,7 @@ // These two registers define part of the calling convention // between compiled code and the interpreter. 
- inline_cache_reg(R_G5); // Inline Cache Register or methodOop for I2C + inline_cache_reg(R_G5); // Inline Cache Register or Method* for I2C interpreter_method_oop_reg(R_G5); // Method Oop Register when calling interpreter // Optional: name the operand used by cisc-spilling to access [stack_pointer + offset] @@ -6070,12 +6049,15 @@ ins_cost(DEFAULT_COST * 3/2); format %{ "SET $con,$dst\t!ptr" %} ins_encode %{ - // [RGV] This next line should be generated from ADLC - if (_opnds[1]->constant_is_oop()) { + relocInfo::relocType constant_reloc = _opnds[1]->constant_reloc(); intptr_t val = $con$$constant; + if (constant_reloc == relocInfo::oop_type) { __ set_oop_constant((jobject) val, $dst$$Register); + } else if (constant_reloc == relocInfo::metadata_type) { + __ set_metadata_constant((Metadata*)val, $dst$$Register); } else { // non-oop pointers, e.g. card mark base, heap top - __ set($con$$constant, $dst$$Register); + assert(constant_reloc == relocInfo::none, "unexpected reloc type"); + __ set(val, $dst$$Register); } %} ins_pipe(loadConP); @@ -6086,12 +6068,15 @@ ins_cost(DEFAULT_COST * 3/2); format %{ "SET $con,$dst\t! ptr" %} ins_encode %{ - // [RGV] This next line should be generated from ADLC - if (_opnds[1]->constant_is_oop()) { + relocInfo::relocType constant_reloc = _opnds[1]->constant_reloc(); intptr_t val = $con$$constant; + if (constant_reloc == relocInfo::oop_type) { __ set_oop_constant((jobject) val, $dst$$Register); + } else if (constant_reloc == relocInfo::metadata_type) { + __ set_metadata_constant((Metadata*)val, $dst$$Register); } else { // non-oop pointers, e.g. card mark base, heap top - __ set($con$$constant, $dst$$Register); + assert(constant_reloc == relocInfo::none, "unexpected reloc type"); + __ set(val, $dst$$Register); } %} ins_pipe(loadConP);