comparison src/share/vm/interpreter/rewriter.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 1d7922586cf6
children f6b0eb4e44cf
6724:36d1d483d5d6 6725:da91efe96a93
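Background for the hunks below: this changeset replaces PermGen-allocated oops (methodOop, constantPoolCacheOop, objArrayOop method arrays) with plain C++ metadata classes (Method*, ConstantPoolCache*, Array<Method*>) allocated out of metaspace owned by the defining loader's ClassLoaderData. The following is a minimal standalone sketch of that allocation model, illustrative only; every name in it (LoaderArena, FakeMethod) is invented for the sketch and is not HotSpot API.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <new>
#include <vector>

// Stand-in for a per-class-loader metaspace arena.
struct LoaderArena {
  std::vector<char*> chunks;
  size_t used = 0;
  size_t cap  = 0;
  char*  top  = nullptr;

  void* allocate(size_t bytes) {
    bytes = (bytes + 7) & ~size_t(7);              // 8-byte align
    if (top == nullptr || used + bytes > cap) {    // grab a fresh chunk when needed
      cap  = 64 * 1024;
      used = 0;
      top  = new char[cap];
      chunks.push_back(top);
    }
    void* p = top + used;
    used += bytes;
    return p;
  }
  ~LoaderArena() {                                 // loader unloads -> all metadata freed at once
    for (char* c : chunks) delete[] c;
  }
};

// Stand-in for a metadata object: a plain C++ struct, no GC header, never moves.
struct FakeMethod {
  const char* name;
  uint16_t    max_stack;
};

int main() {
  LoaderArena arena;                               // conceptually owned by one ClassLoaderData
  FakeMethod* m = new (arena.allocate(sizeof(FakeMethod))) FakeMethod{"toString", 4};
  std::printf("%s max_stack=%u\n", m->name, (unsigned)m->max_stack);
}                                                  // arena destructor frees the metadata with the loader

The point of the model is the lifetime rule visible throughout this file: metadata lives and dies with its class loader and never moves, so the rewriter can hold raw Method* and ConstantPoolCache* pointers instead of handles to movable oops.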
1 /* 1 /*
2 * Copyright (c) 1998, 2011, Oracle and/or its affiliates. All rights reserved. 2 * Copyright (c) 1998, 2012, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 * 4 *
5 * This code is free software; you can redistribute it and/or modify it 5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as 6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation. 7 * published by the Free Software Foundation.
25 #include "precompiled.hpp" 25 #include "precompiled.hpp"
26 #include "interpreter/bytecodes.hpp" 26 #include "interpreter/bytecodes.hpp"
27 #include "interpreter/interpreter.hpp" 27 #include "interpreter/interpreter.hpp"
28 #include "interpreter/rewriter.hpp" 28 #include "interpreter/rewriter.hpp"
29 #include "memory/gcLocker.hpp" 29 #include "memory/gcLocker.hpp"
30 #include "memory/metadataFactory.hpp"
30 #include "memory/oopFactory.hpp" 31 #include "memory/oopFactory.hpp"
31 #include "memory/resourceArea.hpp" 32 #include "memory/resourceArea.hpp"
32 #include "oops/generateOopMap.hpp" 33 #include "oops/generateOopMap.hpp"
33 #include "oops/objArrayOop.hpp" 34 #include "oops/objArrayOop.hpp"
34 #include "oops/oop.inline.hpp" 35 #include "oops/oop.inline.hpp"
39 // that are referred to by the interpreter at runtime via the constant pool cache. 40 // that are referred to by the interpreter at runtime via the constant pool cache.
40 // Also computes a CP map (original_index -> new_index). 41 // Also computes a CP map (original_index -> new_index).
41 // Marks entries in CP which require additional processing. 42 // Marks entries in CP which require additional processing.
42 void Rewriter::compute_index_maps() { 43 void Rewriter::compute_index_maps() {
43 const int length = _pool->length(); 44 const int length = _pool->length();
44 init_cp_map(length); 45 init_maps(length);
45 bool saw_mh_symbol = false; 46 bool saw_mh_symbol = false;
46 for (int i = 0; i < length; i++) { 47 for (int i = 0; i < length; i++) {
47 int tag = _pool->tag_at(i).value(); 48 int tag = _pool->tag_at(i).value();
48 switch (tag) { 49 switch (tag) {
49 case JVM_CONSTANT_InterfaceMethodref: 50 case JVM_CONSTANT_InterfaceMethodref:
50 case JVM_CONSTANT_Fieldref : // fall through 51 case JVM_CONSTANT_Fieldref : // fall through
51 case JVM_CONSTANT_Methodref : // fall through 52 case JVM_CONSTANT_Methodref : // fall through
53 add_cp_cache_entry(i);
54 break;
55 case JVM_CONSTANT_String:
56 case JVM_CONSTANT_Object:
52 case JVM_CONSTANT_MethodHandle : // fall through 57 case JVM_CONSTANT_MethodHandle : // fall through
53 case JVM_CONSTANT_MethodType : // fall through 58 case JVM_CONSTANT_MethodType : // fall through
54 case JVM_CONSTANT_InvokeDynamic : // fall through 59 add_resolved_references_entry(i);
55 add_cp_cache_entry(i);
56 break; 60 break;
57 case JVM_CONSTANT_Utf8: 61 case JVM_CONSTANT_Utf8:
58 if (_pool->symbol_at(i) == vmSymbols::java_lang_invoke_MethodHandle()) 62 if (_pool->symbol_at(i) == vmSymbols::java_lang_invoke_MethodHandle())
59 saw_mh_symbol = true; 63 saw_mh_symbol = true;
60 break; 64 break;
61 } 65 }
62 } 66 }
63 67
68 // Record limits of resolved reference map for constant pool cache indices
69 record_map_limits();
70
64 guarantee((int)_cp_cache_map.length()-1 <= (int)((u2)-1), 71 guarantee((int)_cp_cache_map.length()-1 <= (int)((u2)-1),
65 "all cp cache indexes fit in a u2"); 72 "all cp cache indexes fit in a u2");
66 73
67 if (saw_mh_symbol) 74 if (saw_mh_symbol)
68 _method_handle_invokers.initialize(length, (int)0); 75 _method_handle_invokers.initialize(length, (int)0);
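The loop above now splits constant pool entries between two maps: member references (Fieldref, Methodref, InterfaceMethodref) get constant pool cache entries, while loadable object constants (String, Object, MethodHandle, MethodType) get entries in the new resolved-references array, with the u2 limit enforced on the cache side. A simplified standalone model, with invented names, might look like this:

#include <cassert>
#include <cstdio>
#include <vector>

enum Tag { Utf8, Fieldref, Methodref, InterfaceMethodref, String, MethodHandle, MethodType };

int main() {
  std::vector<Tag> pool = { Utf8, Methodref, String, Fieldref, MethodType };

  std::vector<int> cp_cache_map;                    // cache index        -> cp index
  std::vector<int> resolved_refs_map;               // resolved-ref index -> cp index
  std::vector<int> cp_to_cache(pool.size(), -1);    // cp index -> cache index (or -1)
  std::vector<int> cp_to_ref(pool.size(), -1);      // cp index -> resolved-ref index (or -1)

  for (int i = 0; i < (int)pool.size(); i++) {
    switch (pool[i]) {
    case Fieldref: case Methodref: case InterfaceMethodref:
      cp_to_cache[i] = (int)cp_cache_map.size();
      cp_cache_map.push_back(i);                    // member refs -> cp cache entry
      break;
    case String: case MethodHandle: case MethodType:
      cp_to_ref[i] = (int)resolved_refs_map.size();
      resolved_refs_map.push_back(i);               // object constants -> resolved_references entry
      break;
    default:
      break;
    }
  }
  assert((int)cp_cache_map.size() - 1 <= 0xFFFF);   // all cp cache indexes must fit in a u2
  std::printf("cache entries=%zu, resolved refs=%zu\n",
              cp_cache_map.size(), resolved_refs_map.size());
}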
71 // Unrewrite the bytecodes if an error occurs. 78 // Unrewrite the bytecodes if an error occurs.
72 void Rewriter::restore_bytecodes() { 79 void Rewriter::restore_bytecodes() {
73 int len = _methods->length(); 80 int len = _methods->length();
74 81
75 for (int i = len-1; i >= 0; i--) { 82 for (int i = len-1; i >= 0; i--) {
76 methodOop method = (methodOop)_methods->obj_at(i); 83 Method* method = _methods->at(i);
77 scan_method(method, true); 84 scan_method(method, true);
78 } 85 }
79 } 86 }
80 87
81 // Creates a constant pool cache given a CPC map 88 // Creates a constant pool cache given a CPC map
82 void Rewriter::make_constant_pool_cache(TRAPS) { 89 void Rewriter::make_constant_pool_cache(TRAPS) {
83 const int length = _cp_cache_map.length(); 90 const int length = _cp_cache_map.length();
84 constantPoolCacheOop cache = 91 ClassLoaderData* loader_data = _pool->pool_holder()->class_loader_data();
85 oopFactory::new_constantPoolCache(length, CHECK); 92 ConstantPoolCache* cache =
93 ConstantPoolCache::allocate(loader_data, length, CHECK);
94
95 // initialize object cache in constant pool
96 _pool->initialize_resolved_references(loader_data, _resolved_references_map,
97 _resolved_reference_limit,
98 CHECK);
99
86 No_Safepoint_Verifier nsv; 100 No_Safepoint_Verifier nsv;
87 cache->initialize(_cp_cache_map); 101 cache->initialize(_cp_cache_map, _invokedynamic_references_map);
88 _pool->set_cache(cache); 102 _pool->set_cache(cache);
89 cache->set_constant_pool(_pool()); 103 cache->set_constant_pool(_pool());
90 } 104 }
91 105
92 106
136 if (!reverse) { 150 if (!reverse) {
137 int cp_index = Bytes::get_Java_u2(p); 151 int cp_index = Bytes::get_Java_u2(p);
138 int cache_index = cp_entry_to_cp_cache(cp_index); 152 int cache_index = cp_entry_to_cp_cache(cp_index);
139 Bytes::put_native_u2(p, cache_index); 153 Bytes::put_native_u2(p, cache_index);
140 if (!_method_handle_invokers.is_empty()) 154 if (!_method_handle_invokers.is_empty())
141 maybe_rewrite_invokehandle(p - 1, cp_index, reverse); 155 maybe_rewrite_invokehandle(p - 1, cp_index, cache_index, reverse);
142 } else { 156 } else {
143 int cache_index = Bytes::get_native_u2(p); 157 int cache_index = Bytes::get_native_u2(p);
144 int pool_index = cp_cache_entry_pool_index(cache_index); 158 int pool_index = cp_cache_entry_pool_index(cache_index);
145 Bytes::put_Java_u2(p, pool_index); 159 Bytes::put_Java_u2(p, pool_index);
146 if (!_method_handle_invokers.is_empty()) 160 if (!_method_handle_invokers.is_empty())
147 maybe_rewrite_invokehandle(p - 1, pool_index, reverse); 161 maybe_rewrite_invokehandle(p - 1, pool_index, cache_index, reverse);
148 } 162 }
149 } 163 }
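One detail worth calling out in the forward/reverse patching above: Bytes::get_Java_u2 reads the big-endian operand as it appears in the classfile, while Bytes::put_native_u2 stores the cache index in host byte order so the interpreter can load it with a plain native read. A small standalone illustration (not HotSpot code; the index values are made up):

#include <cstdint>
#include <cstdio>
#include <cstring>

static uint16_t get_java_u2(const uint8_t* p) {     // classfile order: big-endian
  return (uint16_t)((p[0] << 8) | p[1]);
}
static void put_native_u2(uint8_t* p, uint16_t v) { // host order: plain store
  std::memcpy(p, &v, sizeof v);
}

int main() {
  uint8_t operand[2] = { 0x00, 0x2A };              // cp index 42, big-endian as in the classfile
  uint16_t cp_index    = get_java_u2(operand);
  uint16_t cache_index = 7;                         // value chosen for illustration
  put_native_u2(operand, cache_index);              // interpreter later reads it with a native load
  std::printf("cp_index=%u, bytes now: %02x %02x\n",
              cp_index, (unsigned)operand[0], (unsigned)operand[1]);
}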
150 164
151 165
152 // Adjust the invocation bytecode for a signature-polymorphic method (MethodHandle.invoke, etc.) 166 // Adjust the invocation bytecode for a signature-polymorphic method (MethodHandle.invoke, etc.)
153 void Rewriter::maybe_rewrite_invokehandle(address opc, int cp_index, bool reverse) { 167 void Rewriter::maybe_rewrite_invokehandle(address opc, int cp_index, int cache_index, bool reverse) {
154 if (!reverse) { 168 if (!reverse) {
155 if ((*opc) == (u1)Bytecodes::_invokevirtual || 169 if ((*opc) == (u1)Bytecodes::_invokevirtual ||
156 // allow invokespecial as an alias, although it would be very odd: 170 // allow invokespecial as an alias, although it would be very odd:
157 (*opc) == (u1)Bytecodes::_invokespecial) { 171 (*opc) == (u1)Bytecodes::_invokespecial) {
158 assert(_pool->tag_at(cp_index).is_method(), "wrong index"); 172 assert(_pool->tag_at(cp_index).is_method(), "wrong index");
161 int status = _method_handle_invokers[cp_index]; 175 int status = _method_handle_invokers[cp_index];
162 assert(status >= -1 && status <= 1, "oob tri-state"); 176 assert(status >= -1 && status <= 1, "oob tri-state");
163 if (status == 0) { 177 if (status == 0) {
164 if (_pool->klass_ref_at_noresolve(cp_index) == vmSymbols::java_lang_invoke_MethodHandle() && 178 if (_pool->klass_ref_at_noresolve(cp_index) == vmSymbols::java_lang_invoke_MethodHandle() &&
165 MethodHandles::is_signature_polymorphic_name(SystemDictionary::MethodHandle_klass(), 179 MethodHandles::is_signature_polymorphic_name(SystemDictionary::MethodHandle_klass(),
166 _pool->name_ref_at(cp_index))) 180 _pool->name_ref_at(cp_index))) {
181 // we may need a resolved_refs entry for the appendix
182 add_invokedynamic_resolved_references_entry(cp_index, cache_index);
167 status = +1; 183 status = +1;
168 else 184 } else {
169 status = -1; 185 status = -1;
186 }
170 _method_handle_invokers[cp_index] = status; 187 _method_handle_invokers[cp_index] = status;
171 } 188 }
172 // We use a special internal bytecode for such methods (if non-static). 189 // We use a special internal bytecode for such methods (if non-static).
173 // The basic reason for this is that such methods need an extra "appendix" argument 190 // The basic reason for this is that such methods need an extra "appendix" argument
174 // to transmit the call site's intended call type. 191 // to transmit the call site's intended call type.
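The _method_handle_invokers memo consulted above is tri-state: 0 means the constant pool entry has not been checked yet, +1 means it names a signature-polymorphic MethodHandle method (so the bytecode gets the internal invokehandle form), and -1 means it is an ordinary call. Below is a standalone model of just that memoization, with a simplified stand-in predicate rather than the real MethodHandles::is_signature_polymorphic_name check:

#include <cstdio>
#include <string>
#include <vector>

// Simplified stand-in for the klass/name check against java.lang.invoke.MethodHandle.
static bool looks_signature_polymorphic(const std::string& klass, const std::string& name) {
  return klass == "java/lang/invoke/MethodHandle" &&
         (name == "invoke" || name == "invokeExact");
}

int main() {
  std::vector<std::string> klass = { "java/lang/invoke/MethodHandle", "java/util/List" };
  std::vector<std::string> name  = { "invokeExact",                   "size"          };
  std::vector<int> invokers(klass.size(), 0);       // tri-state memo: 0 unknown, +1 yes, -1 no

  for (int pass = 0; pass < 2; pass++) {            // the second pass is answered from the memo
    for (size_t i = 0; i < klass.size(); i++) {
      int& status = invokers[i];
      if (status == 0)
        status = looks_signature_polymorphic(klass[i], name[i]) ? +1 : -1;
      std::printf("cp#%zu -> %s\n", i, status > 0 ? "invokehandle" : "unchanged");
    }
  }
}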
191 void Rewriter::rewrite_invokedynamic(address bcp, int offset, bool reverse) { 208 void Rewriter::rewrite_invokedynamic(address bcp, int offset, bool reverse) {
192 address p = bcp + offset; 209 address p = bcp + offset;
193 assert(p[-1] == Bytecodes::_invokedynamic, "not invokedynamic bytecode"); 210 assert(p[-1] == Bytecodes::_invokedynamic, "not invokedynamic bytecode");
194 if (!reverse) { 211 if (!reverse) {
195 int cp_index = Bytes::get_Java_u2(p); 212 int cp_index = Bytes::get_Java_u2(p);
196 int cpc = maybe_add_cp_cache_entry(cp_index); // add lazily 213 int cache_index = add_invokedynamic_cp_cache_entry(cp_index);
197 int cpc2 = add_secondary_cp_cache_entry(cpc); 214 add_invokedynamic_resolved_references_entry(cp_index, cache_index);
198
199 // Replace the trailing four bytes with a CPC index for the dynamic 215 // Replace the trailing four bytes with a CPC index for the dynamic
200 // call site. Unlike other CPC entries, there is one per bytecode, 216 // call site. Unlike other CPC entries, there is one per bytecode,
201 // not just one per distinct CP entry. In other words, the 217 // not just one per distinct CP entry. In other words, the
202 // CPC-to-CP relation is many-to-one for invokedynamic entries. 218 // CPC-to-CP relation is many-to-one for invokedynamic entries.
203 // This means we must use a larger index size than u2 to address 219 // This means we must use a larger index size than u2 to address
204 // all these entries. That is the main reason invokedynamic 220 // all these entries. That is the main reason invokedynamic
205 // must have a five-byte instruction format. (Of course, other JVM 221 // must have a five-byte instruction format. (Of course, other JVM
206 // implementations can use the bytes for other purposes.) 222 // implementations can use the bytes for other purposes.)
207 Bytes::put_native_u4(p, constantPoolCacheOopDesc::encode_secondary_index(cpc2)); 223 Bytes::put_native_u4(p, ConstantPool::encode_invokedynamic_index(cache_index));
208 // Note: We use native_u4 format exclusively for 4-byte indexes. 224 // Note: We use native_u4 format exclusively for 4-byte indexes.
209 } else { 225 } else {
210 int cache_index = constantPoolCacheOopDesc::decode_secondary_index( 226 // callsite index
227 int cache_index = ConstantPool::decode_invokedynamic_index(
211 Bytes::get_native_u4(p)); 228 Bytes::get_native_u4(p));
212 int secondary_index = cp_cache_secondary_entry_main_index(cache_index); 229 int cp_index = cp_cache_entry_pool_index(cache_index);
213 int pool_index = cp_cache_entry_pool_index(secondary_index); 230 assert(_pool->tag_at(cp_index).is_invoke_dynamic(), "wrong index");
214 assert(_pool->tag_at(pool_index).is_invoke_dynamic(), "wrong index");
215 // zero out 4 bytes 231 // zero out 4 bytes
216 Bytes::put_Java_u4(p, 0); 232 Bytes::put_Java_u4(p, 0);
217 Bytes::put_Java_u2(p, pool_index); 233 Bytes::put_Java_u2(p, cp_index);
218 } 234 }
219 } 235 }
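Because the constant-pool-cache entry for an invokedynamic call site is per bytecode rather than per constant pool entry (the many-to-one relation described in the comment), the operand can exceed a u2, which is why a native u4 is written above. One simple encoding that keeps such indices disjoint from ordinary u2 cache indices is the bitwise complement; whether or not that matches ConstantPool::encode_invokedynamic_index exactly, the round trip would look like:

#include <cassert>
#include <cstdint>
#include <cstdio>

// Illustrative encoding: store the complement so an invokedynamic call-site index
// is negative and can never be confused with a small u2 cp-cache index.
static int32_t encode_indy_index(int32_t i) { assert(i >= 0); return ~i; }
static int32_t decode_indy_index(int32_t v) { assert(v <  0); return ~v; }

int main() {
  // One cache entry per invokedynamic bytecode, so counts can exceed 65535.
  const int32_t samples[] = { 0, 7, 70000 };
  for (int32_t cache_index : samples) {
    int32_t raw = encode_indy_index(cache_index);   // written as a native u4 into the bytecode
    assert(decode_indy_index(raw) == cache_index);  // reverse rewriting recovers the index
    std::printf("cache_index=%d encoded=%d\n", cache_index, raw);
  }
}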
220 236
221 237
222 // Rewrite some ldc bytecodes to _fast_aldc 238 // Rewrite some ldc bytecodes to _fast_aldc
225 if (!reverse) { 241 if (!reverse) {
226 assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode"); 242 assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
227 address p = bcp + offset; 243 address p = bcp + offset;
228 int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p); 244 int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
229 constantTag tag = _pool->tag_at(cp_index).value(); 245 constantTag tag = _pool->tag_at(cp_index).value();
230 if (tag.is_method_handle() || tag.is_method_type()) { 246 if (tag.is_method_handle() || tag.is_method_type() || tag.is_string() || tag.is_object()) {
231 int cache_index = cp_entry_to_cp_cache(cp_index); 247 int ref_index = cp_entry_to_resolved_references(cp_index);
232 if (is_wide) { 248 if (is_wide) {
233 (*bcp) = Bytecodes::_fast_aldc_w; 249 (*bcp) = Bytecodes::_fast_aldc_w;
234 assert(cache_index == (u2)cache_index, "index overflow"); 250 assert(ref_index == (u2)ref_index, "index overflow");
235 Bytes::put_native_u2(p, cache_index); 251 Bytes::put_native_u2(p, ref_index);
236 } else { 252 } else {
237 (*bcp) = Bytecodes::_fast_aldc; 253 (*bcp) = Bytecodes::_fast_aldc;
238 assert(cache_index == (u1)cache_index, "index overflow"); 254 assert(ref_index == (u1)ref_index, "index overflow");
239 (*p) = (u1)cache_index; 255 (*p) = (u1)ref_index;
240 } 256 }
241 } 257 }
242 } else { 258 } else {
243 Bytecodes::Code rewritten_bc = 259 Bytecodes::Code rewritten_bc =
244 (is_wide ? Bytecodes::_fast_aldc_w : Bytecodes::_fast_aldc); 260 (is_wide ? Bytecodes::_fast_aldc_w : Bytecodes::_fast_aldc);
245 if ((*bcp) == rewritten_bc) { 261 if ((*bcp) == rewritten_bc) {
246 address p = bcp + offset; 262 address p = bcp + offset;
247 int cache_index = is_wide ? Bytes::get_native_u2(p) : (u1)(*p); 263 int ref_index = is_wide ? Bytes::get_native_u2(p) : (u1)(*p);
248 int pool_index = cp_cache_entry_pool_index(cache_index); 264 int pool_index = resolved_references_entry_to_pool_index(ref_index);
249 if (is_wide) { 265 if (is_wide) {
250 (*bcp) = Bytecodes::_ldc_w; 266 (*bcp) = Bytecodes::_ldc_w;
251 assert(pool_index == (u2)pool_index, "index overflow"); 267 assert(pool_index == (u2)pool_index, "index overflow");
252 Bytes::put_Java_u2(p, pool_index); 268 Bytes::put_Java_u2(p, pool_index);
253 } else { 269 } else {
259 } 275 }
260 } 276 }
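The forward and reverse directions above mirror each other: the ldc operand is swapped from the original constant pool index to the resolved-references index when rewriting, and swapped back (via resolved_references_entry_to_pool_index) if rewriting has to be undone. Here is a standalone model of the narrow (one-byte) case, where the _fast_aldc opcode value is made up for illustration:

#include <cassert>
#include <cstdint>
#include <cstdio>
#include <vector>

enum : uint8_t { LDC = 0x12, FAST_ALDC = 0xE5 };    // 0x12 is the real ldc; 0xE5 is illustrative

int main() {
  std::vector<int> ref_to_cp = { 9 };               // resolved-ref index 0 came from cp index 9
  std::vector<int> cp_to_ref(16, -1);
  cp_to_ref[9] = 0;

  uint8_t code[2] = { LDC, 9 };                     // ldc #9, where #9 is an object constant

  // forward rewrite: ldc cp_index -> fast_aldc ref_index
  int ref = cp_to_ref[code[1]];
  assert(ref == (uint8_t)ref);                      // must still fit in one byte
  code[0] = FAST_ALDC;
  code[1] = (uint8_t)ref;

  // reverse rewrite (e.g. if linking later fails): restore the original bytecode
  code[1] = (uint8_t)ref_to_cp[code[1]];
  code[0] = LDC;
  std::printf("restored: opcode=0x%02x operand=%u\n", (unsigned)code[0], (unsigned)code[1]);
}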
261 277
262 278
263 // Rewrites a method given the index_map information 279 // Rewrites a method given the index_map information
264 void Rewriter::scan_method(methodOop method, bool reverse) { 280 void Rewriter::scan_method(Method* method, bool reverse) {
265 281
266 int nof_jsrs = 0; 282 int nof_jsrs = 0;
267 bool has_monitor_bytecodes = false; 283 bool has_monitor_bytecodes = false;
268 284
269 { 285 {
270 // We cannot tolerate a GC in this block, because we've 286 // We cannot tolerate a GC in this block, because we've
271 // cached the bytecodes in 'code_base'. If the methodOop 287 // cached the bytecodes in 'code_base'. If the Method*
272 // moves, the bytecodes will also move. 288 // moves, the bytecodes will also move.
273 No_Safepoint_Verifier nsv; 289 No_Safepoint_Verifier nsv;
274 Bytecodes::Code c; 290 Bytecodes::Code c;
275 291
276 // Bytecodes and their length 292 // Bytecodes and their length
369 methodHandle Rewriter::rewrite_jsrs(methodHandle method, TRAPS) { 385 methodHandle Rewriter::rewrite_jsrs(methodHandle method, TRAPS) {
370 ResourceMark rm(THREAD); 386 ResourceMark rm(THREAD);
371 ResolveOopMapConflicts romc(method); 387 ResolveOopMapConflicts romc(method);
372 methodHandle original_method = method; 388 methodHandle original_method = method;
373 method = romc.do_potential_rewrite(CHECK_(methodHandle())); 389 method = romc.do_potential_rewrite(CHECK_(methodHandle()));
374 if (method() != original_method()) {
375 // Insert invalid bytecode into original methodOop and set
376 // interpreter entrypoint, so that executing this method
377 // will manifest itself in an easily recognizable form.
378 address bcp = original_method->bcp_from(0);
379 *bcp = (u1)Bytecodes::_shouldnotreachhere;
380 int kind = Interpreter::method_kind(original_method);
381 original_method->set_interpreter_kind(kind);
382 }
383
384 // Update monitor matching info. 390 // Update monitor matching info.
385 if (romc.monitor_safe()) { 391 if (romc.monitor_safe()) {
386 method->set_guaranteed_monitor_matching(); 392 method->set_guaranteed_monitor_matching();
387 } 393 }
388 394
394 Rewriter rw(klass, klass->constants(), klass->methods(), CHECK); 400 Rewriter rw(klass, klass->constants(), klass->methods(), CHECK);
395 // (That's all, folks.) 401 // (That's all, folks.)
396 } 402 }
397 403
398 404
399 void Rewriter::rewrite(instanceKlassHandle klass, constantPoolHandle cpool, objArrayHandle methods, TRAPS) { 405 void Rewriter::rewrite(instanceKlassHandle klass, constantPoolHandle cpool, Array<Method*>* methods, TRAPS) {
400 ResourceMark rm(THREAD); 406 ResourceMark rm(THREAD);
401 Rewriter rw(klass, cpool, methods, CHECK); 407 Rewriter rw(klass, cpool, methods, CHECK);
402 // (That's all, folks.) 408 // (That's all, folks.)
403 } 409 }
404 410
405 411
406 Rewriter::Rewriter(instanceKlassHandle klass, constantPoolHandle cpool, objArrayHandle methods, TRAPS) 412 Rewriter::Rewriter(instanceKlassHandle klass, constantPoolHandle cpool, Array<Method*>* methods, TRAPS)
407 : _klass(klass), 413 : _klass(klass),
408 _pool(cpool), 414 _pool(cpool),
409 _methods(methods) 415 _methods(methods)
410 { 416 {
411 assert(_pool->cache() == NULL, "constant pool cache must not be set yet"); 417 assert(_pool->cache() == NULL, "constant pool cache must not be set yet");
412 418
413 // determine index maps for methodOop rewriting 419 // determine index maps for Method* rewriting
414 compute_index_maps(); 420 compute_index_maps();
415 421
416 if (RegisterFinalizersAtInit && _klass->name() == vmSymbols::java_lang_Object()) { 422 if (RegisterFinalizersAtInit && _klass->name() == vmSymbols::java_lang_Object()) {
417 bool did_rewrite = false; 423 bool did_rewrite = false;
418 int i = _methods->length(); 424 int i = _methods->length();
419 while (i-- > 0) { 425 while (i-- > 0) {
420 methodOop method = (methodOop)_methods->obj_at(i); 426 Method* method = _methods->at(i);
421 if (method->intrinsic_id() == vmIntrinsics::_Object_init) { 427 if (method->intrinsic_id() == vmIntrinsics::_Object_init) {
422 // rewrite the return bytecodes of Object.<init> to register the 428 // rewrite the return bytecodes of Object.<init> to register the
423 // object for finalization if needed. 429 // object for finalization if needed.
424 methodHandle m(THREAD, method); 430 methodHandle m(THREAD, method);
425 rewrite_Object_init(m, CHECK); 431 rewrite_Object_init(m, CHECK);
432 438
433 // rewrite methods, in two passes 439 // rewrite methods, in two passes
434 int len = _methods->length(); 440 int len = _methods->length();
435 441
436 for (int i = len-1; i >= 0; i--) { 442 for (int i = len-1; i >= 0; i--) {
437 methodOop method = (methodOop)_methods->obj_at(i); 443 Method* method = _methods->at(i);
438 scan_method(method); 444 scan_method(method);
439 } 445 }
440 446
441 // allocate constant pool cache, now that we've seen all the bytecodes 447 // allocate constant pool cache, now that we've seen all the bytecodes
442 make_constant_pool_cache(THREAD); 448 make_constant_pool_cache(THREAD);
453 // stage because it can throw other exceptions, leaving the bytecodes 459 // stage because it can throw other exceptions, leaving the bytecodes
454 // pointing at constant pool cache entries. 460 // pointing at constant pool cache entries.
455 // Link and check jvmti dependencies while we're iterating over the methods. 461 // Link and check jvmti dependencies while we're iterating over the methods.
456 // JSR292 code calls with a different set of methods, so two entry points. 462 // JSR292 code calls with a different set of methods, so two entry points.
457 void Rewriter::relocate_and_link(instanceKlassHandle this_oop, TRAPS) { 463 void Rewriter::relocate_and_link(instanceKlassHandle this_oop, TRAPS) {
458 objArrayHandle methods(THREAD, this_oop->methods()); 464 relocate_and_link(this_oop, this_oop->methods(), THREAD);
459 relocate_and_link(this_oop, methods, THREAD);
460 } 465 }
461 466
462 void Rewriter::relocate_and_link(instanceKlassHandle this_oop, 467 void Rewriter::relocate_and_link(instanceKlassHandle this_oop,
463 objArrayHandle methods, TRAPS) { 468 Array<Method*>* methods, TRAPS) {
464 int len = methods->length(); 469 int len = methods->length();
465 for (int i = len-1; i >= 0; i--) { 470 for (int i = len-1; i >= 0; i--) {
466 methodHandle m(THREAD, (methodOop)methods->obj_at(i)); 471 methodHandle m(THREAD, methods->at(i));
467 472
468 if (m->has_jsrs()) { 473 if (m->has_jsrs()) {
469 m = rewrite_jsrs(m, CHECK); 474 m = rewrite_jsrs(m, CHECK);
470 // Method might have gotten rewritten. 475 // Method might have gotten rewritten.
471 methods->obj_at_put(i, m()); 476 methods->at_put(i, m());
472 } 477 }
473 478
474 // Set up method entry points for compiler and interpreter. 479 // Set up method entry points for compiler and interpreter.
475 m->link_method(m, CHECK); 480 m->link_method(m, CHECK);
476 481
479 if (StressMethodComparator) { 484 if (StressMethodComparator) {
480 static int nmc = 0; 485 static int nmc = 0;
481 for (int j = i; j >= 0 && j >= i-4; j--) { 486 for (int j = i; j >= 0 && j >= i-4; j--) {
482 if ((++nmc % 1000) == 0) tty->print_cr("Have run MethodComparator %d times...", nmc); 487 if ((++nmc % 1000) == 0) tty->print_cr("Have run MethodComparator %d times...", nmc);
483 bool z = MethodComparator::methods_EMCP(m(), 488 bool z = MethodComparator::methods_EMCP(m(),
484 (methodOop)methods->obj_at(j)); 489 methods->at(j));
485 if (j == i && !z) { 490 if (j == i && !z) {
486 tty->print("MethodComparator FAIL: "); m->print(); m->print_codes(); 491 tty->print("MethodComparator FAIL: "); m->print(); m->print_codes();
487 assert(z, "method must compare equal to itself"); 492 assert(z, "method must compare equal to itself");
488 } 493 }
489 } 494 }