comparison src/cpu/sparc/vm/vtableStubs_sparc.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 1d7922586cf6
children 8e47bac5643a
comparing 6724:36d1d483d5d6 with 6725:da91efe96a93
@@ -1,7 +1,7 @@
 /*
- * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 only, as
  * published by the Free Software Foundation.
@@ -67,16 +67,16 @@
 
   // get receiver klass
   address npe_addr = __ pc();
   __ load_klass(O0, G3_scratch);
 
-  // set methodOop (in case of interpreted method), and destination address
+  // set Method* (in case of interpreted method), and destination address
 #ifndef PRODUCT
   if (DebugVtables) {
     Label L;
     // check offset vs vtable length
-    __ ld(G3_scratch, instanceKlass::vtable_length_offset()*wordSize, G5);
+    __ ld(G3_scratch, InstanceKlass::vtable_length_offset()*wordSize, G5);
     __ cmp_and_br_short(G5, vtable_index*vtableEntry::size(), Assembler::greaterUnsigned, Assembler::pt, L);
     __ set(vtable_index, O2);
     __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), O0, O2);
     __ bind(L);
   }
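The DebugVtables block above only sanity-checks the vtable index against the receiver's vtable length before dispatching: the compare is unsigned, and the branch skips the bad_compiled_vtable_index call when the index is in range. A minimal sketch of that check in plain C++ (the function name and word-based units are illustrative, not HotSpot API):

    #include <stdint.h>

    // Sketch of the emitted range check: G5 holds the vtable length in words,
    // and cmp_and_br_short(..., greaterUnsigned, ...) falls through to the
    // error path unless length > vtable_index * vtableEntry::size(), compared
    // as unsigned values.
    static bool vtable_index_in_range(uint32_t vtable_length_in_words,
                                      uint32_t vtable_index,
                                      uint32_t entry_size_in_words) {
      return vtable_length_in_words > vtable_index * entry_size_in_words;
    }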
@@ -94,15 +94,15 @@
 #endif
 
   address ame_addr = __ pc(); // if the vtable entry is null, the method is abstract
   // NOTE: for vtable dispatches, the vtable entry will never be null.
 
-  __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_compiled_offset()), G3_scratch);
+  __ ld_ptr(G5_method, in_bytes(Method::from_compiled_offset()), G3_scratch);
 
   // jump to target (either compiled code or c2iadapter)
   __ JMP(G3_scratch, 0);
-  // load methodOop (in case we call c2iadapter)
+  // load Method* (in case we call c2iadapter)
   __ delayed()->nop();
 
   masm->flush();
 
   if (PrintMiscellaneous && (WizardMode || Verbose)) {
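Taken together, the stub loads the receiver's klass, picks the vtable entry for vtable_index (the entry load itself sits in the lines elided from this hunk), and tail-jumps through the method's from-compiled entry point, keeping G5_method live for a possible c2i adapter. A rough C++ rendering of that dispatch path, using placeholder layouts rather than HotSpot's real Klass/Method declarations:

    // Illustrative sketch only: approximates what the generated SPARC stub does.
    struct Method  { void* _from_compiled_entry; };   // placeholder layout
    struct Klass   { Method** _vtable; };             // placeholder layout
    struct oopDesc { Klass* _klass; };                // placeholder receiver header

    typedef void (*compiled_entry_t)(oopDesc* receiver /* , args... */);

    static void vtable_dispatch(oopDesc* receiver, int vtable_index) {
      Klass*  k = receiver->_klass;                   // load_klass(O0, G3_scratch)
      Method* m = k->_vtable[vtable_index];           // vtable entry -> G5_method
      compiled_entry_t entry =
          (compiled_entry_t) m->_from_compiled_entry; // Method::from_compiled_offset()
      entry(receiver);                                // JMP G3_scratch; delay-slot nop
    }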
@@ -128,23 +128,22 @@
   VtableStub* s = new(sparc_code_length) VtableStub(false, itable_index);
   ResourceMark rm;
   CodeBuffer cb(s->entry_point(), sparc_code_length);
   MacroAssembler* masm = new MacroAssembler(&cb);
 
-  Register G3_klassOop = G3_scratch;
+  Register G3_Klass = G3_scratch;
   Register G5_interface = G5; // Passed in as an argument
   Label search;
 
   // Entry arguments:
   // G5_interface: Interface
   // O0: Receiver
   assert(VtableStub::receiver_location() == O0->as_VMReg(), "receiver expected in O0");
 
   // get receiver klass (also an implicit null-check)
   address npe_addr = __ pc();
-  __ load_klass(O0, G3_klassOop);
-  __ verify_oop(G3_klassOop);
+  __ load_klass(O0, G3_Klass);
 
   // Push a new window to get some temp registers. This chops the head of all
   // my 64-bit %o registers in the LION build, but this is OK because no longs
   // are passed in the %o registers. Instead, longs are passed in G1 and G4
   // and so those registers are not available here.
@@ -158,35 +157,34 @@
 
   Label throw_icce;
 
   Register L5_method = L5;
   __ lookup_interface_method(// inputs: rec. class, interface, itable index
-                             G3_klassOop, G5_interface, itable_index,
+                             G3_Klass, G5_interface, itable_index,
                              // outputs: method, scan temp. reg
                              L5_method, L2, L3,
                              throw_icce);
 
 #ifndef PRODUCT
   if (DebugVtables) {
     Label L01;
     __ br_notnull_short(L5_method, Assembler::pt, L01);
-    __ stop("methodOop is null");
+    __ stop("Method* is null");
     __ bind(L01);
-    __ verify_oop(L5_method);
   }
 #endif
 
   // If the following load is through a NULL pointer, we'll take an OS
   // exception that should translate into an AbstractMethodError. We need the
   // window count to be correct at that time.
   __ restore(L5_method, 0, G5_method);
   // Restore registers *before* the AME point.
 
   address ame_addr = __ pc(); // if the vtable entry is null, the method is abstract
-  __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_compiled_offset()), G3_scratch);
+  __ ld_ptr(G5_method, in_bytes(Method::from_compiled_offset()), G3_scratch);
 
-  // G5_method: methodOop
+  // G5_method: Method*
   // O0: Receiver
   // G3_scratch: entry point
   __ JMP(G3_scratch, 0);
   __ delayed()->nop();
 
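In the itable stub, lookup_interface_method scans the receiver klass's itable for the requested interface and loads the Method* at itable_index, branching to throw_icce when the receiver does not implement the interface. A hedged sketch of that scan with placeholder structures, not HotSpot's real itable entry layout:

    #include <stddef.h>

    struct Klass;                                // receiver klass (opaque here)
    struct Method;                               // target method metadata
    struct ItableEntry {                         // placeholder, not HotSpot's layout
      Klass*   _interface;                       // interface this block belongs to
      Method** _methods;                         // that interface's methods, by index
    };

    // Returns NULL when the interface is not found; the stub then jumps to
    // throw_icce, which raises IncompatibleClassChangeError.
    static Method* lookup_interface_method(ItableEntry* itable, size_t count,
                                           Klass* interface, int itable_index) {
      for (size_t i = 0; i < count; i++) {
        if (itable[i]._interface == interface) {
          return itable[i]._methods[itable_index];
        }
      }
      return NULL;
    }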
@@ -219,17 +217,17 @@
   const int slop = 2*BytesPerInstWord; // sethi;add (needed for long offsets)
   if (is_vtable_stub) {
     // ld;ld;ld,jmp,nop
     const int basic = 5*BytesPerInstWord +
                       // shift;add for load_klass (only shift with zero heap based)
-                      (UseCompressedOops ?
+                      (UseCompressedKlassPointers ?
                         ((Universe::narrow_oop_base() == NULL) ? BytesPerInstWord : 2*BytesPerInstWord) : 0);
     return basic + slop;
   } else {
     const int basic = (28 LP64_ONLY(+ 6)) * BytesPerInstWord +
                       // shift;add for load_klass (only shift with zero heap based)
-                      (UseCompressedOops ?
+                      (UseCompressedKlassPointers ?
                         ((Universe::narrow_oop_base() == NULL) ? BytesPerInstWord : 2*BytesPerInstWord) : 0);
     return (basic + slop);
   }
 }
 
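The only change in this size-limit code is swapping UseCompressedOops for UseCompressedKlassPointers, since load_klass now decodes a compressed klass pointer rather than a compressed oop; the instruction budget itself is unchanged. As a worked example of the vtable-stub case (assuming BytesPerInstWord is 4 and a zero heap base, so load_klass needs only the shift):

    #include <stdio.h>

    int main() {
      const int BytesPerInstWord = 4;             // SPARC instruction size
      const int slop  = 2 * BytesPerInstWord;     // sethi;add for long offsets
      const int basic = 5 * BytesPerInstWord      // ld;ld;ld,jmp,nop
                      + 1 * BytesPerInstWord;     // shift only: compressed klass, zero base
      printf("vtable stub size limit: %d bytes\n", basic + slop);  // prints 32
      return 0;
    }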