comparison src/share/vm/interpreter/rewriter.cpp @ 7066:7d815d842ee0

Merge.
author Christian Haeubl <haeubl@ssw.jku.at>
date Fri, 23 Nov 2012 11:50:27 +0100
parents e522a00b91aa
children 5d0bb7d52783
comparing 7065:cfacf5d5bade with 7066:7d815d842ee0
@@ -1,7 +1,7 @@
 /*
- * Copyright (c) 1998, 2011, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1998, 2012, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 only, as
  * published by the Free Software Foundation.
@@ -25,10 +25,11 @@
 #include "precompiled.hpp"
 #include "interpreter/bytecodes.hpp"
 #include "interpreter/interpreter.hpp"
 #include "interpreter/rewriter.hpp"
 #include "memory/gcLocker.hpp"
+#include "memory/metadataFactory.hpp"
 #include "memory/oopFactory.hpp"
 #include "memory/resourceArea.hpp"
 #include "oops/generateOopMap.hpp"
 #include "oops/objArrayOop.hpp"
 #include "oops/oop.inline.hpp"
@@ -39,30 +40,36 @@
 // that are referred to by the interpreter at runtime via the constant pool cache.
 // Also computes a CP map (original_index -> new_index).
 // Marks entries in CP which require additional processing.
 void Rewriter::compute_index_maps() {
   const int length = _pool->length();
-  init_cp_map(length);
+  init_maps(length);
   bool saw_mh_symbol = false;
   for (int i = 0; i < length; i++) {
     int tag = _pool->tag_at(i).value();
     switch (tag) {
       case JVM_CONSTANT_InterfaceMethodref:
       case JVM_CONSTANT_Fieldref : // fall through
       case JVM_CONSTANT_Methodref : // fall through
+        add_cp_cache_entry(i);
+        break;
+      case JVM_CONSTANT_String:
+      case JVM_CONSTANT_Object:
       case JVM_CONSTANT_MethodHandle : // fall through
       case JVM_CONSTANT_MethodType : // fall through
-      case JVM_CONSTANT_InvokeDynamic : // fall through
-        add_cp_cache_entry(i);
+        add_resolved_references_entry(i);
         break;
       case JVM_CONSTANT_Utf8:
         if (_pool->symbol_at(i) == vmSymbols::java_lang_invoke_MethodHandle())
           saw_mh_symbol = true;
         break;
     }
   }

+  // Record limits of resolved reference map for constant pool cache indices
+  record_map_limits();
+
   guarantee((int)_cp_cache_map.length()-1 <= (int)((u2)-1),
             "all cp cache indexes fit in a u2");

   if (saw_mh_symbol)
     _method_handle_invokers.initialize(length, (int)0);
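For orientation: the hunk above is where the rewriter stops funneling every rewritable constant through the constant pool cache. Member references (Fieldref/Methodref/InterfaceMethodref) still get cp-cache entries, while oop-like constants (String, Object, MethodHandle, MethodType) now go into a separate resolved-references map; InvokeDynamic entries are added later, per call site. A minimal standalone sketch of that two-map bookkeeping follows; the struct, tag names, and helpers are illustrative stand-ins, not the HotSpot Rewriter fields:

    #include <cstdio>
    #include <vector>

    // Illustration only: a tiny model of the two index maps built by
    // compute_index_maps().  Real HotSpot keeps these in Rewriter members;
    // the enum values below are simplified stand-ins for constant pool tags.
    enum Tag { Fieldref, Methodref, InterfaceMethodref, String, Object,
               MethodHandle, MethodType, Utf8 };

    struct IndexMaps {
      std::vector<int> cp_cache_map;            // cache_index -> original cp index
      std::vector<int> resolved_references_map; // ref_index   -> original cp index

      int add_cp_cache_entry(int cp_index) {
        cp_cache_map.push_back(cp_index);
        return (int)cp_cache_map.size() - 1;    // newly assigned cache index
      }
      int add_resolved_references_entry(int cp_index) {
        resolved_references_map.push_back(cp_index);
        return (int)resolved_references_map.size() - 1;  // newly assigned ref index
      }
    };

    int main() {
      // Pretend constant pool: index -> tag
      Tag pool[] = { Utf8, Methodref, String, Fieldref, MethodType };
      IndexMaps maps;
      for (int i = 0; i < 5; i++) {
        switch (pool[i]) {
          case Fieldref: case Methodref: case InterfaceMethodref:
            printf("cp[%d] -> cache[%d]\n", i, maps.add_cp_cache_entry(i));
            break;
          case String: case Object: case MethodHandle: case MethodType:
            printf("cp[%d] -> resolved_refs[%d]\n", i,
                   maps.add_resolved_references_entry(i));
            break;
          default:
            break;  // Utf8 and friends need no runtime entry
        }
      }
      return 0;
    }

The point of the split is that the second map indexes the pool's object cache of resolved references (see initialize_resolved_references below), so only constants that resolve to Java objects need entries there.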
@@ -71,22 +78,29 @@
 // Unrewrite the bytecodes if an error occurs.
 void Rewriter::restore_bytecodes() {
   int len = _methods->length();

   for (int i = len-1; i >= 0; i--) {
-    methodOop method = (methodOop)_methods->obj_at(i);
+    Method* method = _methods->at(i);
     scan_method(method, true);
   }
 }

 // Creates a constant pool cache given a CPC map
 void Rewriter::make_constant_pool_cache(TRAPS) {
   const int length = _cp_cache_map.length();
-  constantPoolCacheOop cache =
-      oopFactory::new_constantPoolCache(length, CHECK);
+  ClassLoaderData* loader_data = _pool->pool_holder()->class_loader_data();
+  ConstantPoolCache* cache =
+      ConstantPoolCache::allocate(loader_data, length, CHECK);
+
+  // initialize object cache in constant pool
+  _pool->initialize_resolved_references(loader_data, _resolved_references_map,
+                                        _resolved_reference_limit,
+                                        CHECK);
+
   No_Safepoint_Verifier nsv;
-  cache->initialize(_cp_cache_map);
+  cache->initialize(_cp_cache_map, _invokedynamic_references_map);
   _pool->set_cache(cache);
   cache->set_constant_pool(_pool());
 }


@@ -140,23 +154,23 @@
   if (!reverse) {
     int cp_index = Bytes::get_Java_u2(p);
     int cache_index = cp_entry_to_cp_cache(cp_index);
     Bytes::put_native_u2(p, cache_index);
     if (!_method_handle_invokers.is_empty())
-      maybe_rewrite_invokehandle(p - 1, cp_index, reverse);
+      maybe_rewrite_invokehandle(p - 1, cp_index, cache_index, reverse);
   } else {
     int cache_index = Bytes::get_native_u2(p);
     int pool_index = cp_cache_entry_pool_index(cache_index);
     Bytes::put_Java_u2(p, pool_index);
     if (!_method_handle_invokers.is_empty())
-      maybe_rewrite_invokehandle(p - 1, pool_index, reverse);
+      maybe_rewrite_invokehandle(p - 1, pool_index, cache_index, reverse);
   }
 }


 // Adjust the invocation bytecode for a signature-polymorphic method (MethodHandle.invoke, etc.)
-void Rewriter::maybe_rewrite_invokehandle(address opc, int cp_index, bool reverse) {
+void Rewriter::maybe_rewrite_invokehandle(address opc, int cp_index, int cache_index, bool reverse) {
   if (!reverse) {
     if ((*opc) == (u1)Bytecodes::_invokevirtual ||
         // allow invokespecial as an alias, although it would be very odd:
         (*opc) == (u1)Bytecodes::_invokespecial) {
       assert(_pool->tag_at(cp_index).is_method(), "wrong index");
@@ -165,14 +179,17 @@
       int status = _method_handle_invokers[cp_index];
       assert(status >= -1 && status <= 1, "oob tri-state");
       if (status == 0) {
         if (_pool->klass_ref_at_noresolve(cp_index) == vmSymbols::java_lang_invoke_MethodHandle() &&
             MethodHandles::is_signature_polymorphic_name(SystemDictionary::MethodHandle_klass(),
-                                                         _pool->name_ref_at(cp_index)))
+                                                         _pool->name_ref_at(cp_index))) {
+          // we may need a resolved_refs entry for the appendix
+          add_invokedynamic_resolved_references_entries(cp_index, cache_index);
           status = +1;
-        else
+        } else {
           status = -1;
+        }
         _method_handle_invokers[cp_index] = status;
       }
       // We use a special internal bytecode for such methods (if non-static).
       // The basic reason for this is that such methods need an extra "appendix" argument
       // to transmit the call site's intended call type.
@@ -195,32 +212,31 @@
 void Rewriter::rewrite_invokedynamic(address bcp, int offset, bool reverse) {
   address p = bcp + offset;
   assert(p[-1] == Bytecodes::_invokedynamic, "not invokedynamic bytecode");
   if (!reverse) {
     int cp_index = Bytes::get_Java_u2(p);
-    int cpc  = maybe_add_cp_cache_entry(cp_index);  // add lazily
-    int cpc2 = add_secondary_cp_cache_entry(cpc);
-
+    int cache_index = add_invokedynamic_cp_cache_entry(cp_index);
+    add_invokedynamic_resolved_references_entries(cp_index, cache_index);
     // Replace the trailing four bytes with a CPC index for the dynamic
     // call site. Unlike other CPC entries, there is one per bytecode,
     // not just one per distinct CP entry. In other words, the
     // CPC-to-CP relation is many-to-one for invokedynamic entries.
     // This means we must use a larger index size than u2 to address
     // all these entries. That is the main reason invokedynamic
     // must have a five-byte instruction format. (Of course, other JVM
     // implementations can use the bytes for other purposes.)
-    Bytes::put_native_u4(p, constantPoolCacheOopDesc::encode_secondary_index(cpc2));
+    Bytes::put_native_u4(p, ConstantPool::encode_invokedynamic_index(cache_index));
     // Note: We use native_u4 format exclusively for 4-byte indexes.
   } else {
-    int cache_index = constantPoolCacheOopDesc::decode_secondary_index(
+    // callsite index
+    int cache_index = ConstantPool::decode_invokedynamic_index(
                         Bytes::get_native_u4(p));
-    int secondary_index = cp_cache_secondary_entry_main_index(cache_index);
-    int pool_index = cp_cache_entry_pool_index(secondary_index);
-    assert(_pool->tag_at(pool_index).is_invoke_dynamic(), "wrong index");
+    int cp_index = cp_cache_entry_pool_index(cache_index);
+    assert(_pool->tag_at(cp_index).is_invoke_dynamic(), "wrong index");
     // zero out 4 bytes
     Bytes::put_Java_u4(p, 0);
-    Bytes::put_Java_u2(p, pool_index);
+    Bytes::put_Java_u2(p, cp_index);
   }
 }


 // Rewrite some ldc bytecodes to _fast_aldc
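The comment block retained in the hunk above explains why invokedynamic carries a four-byte operand: every bytecode gets its own call-site entry, so the index may not fit in a u2, and it is written in native byte order via Bytes::put_native_u4. The sketch below shows just that round trip. encode_idx/decode_idx are illustrative stand-ins for ConstantPool::encode_invokedynamic_index/decode_invokedynamic_index (the bitwise complement is only an assumption used here to make encoded values recognizably different from plain indices), and put_native_u4/get_native_u4 model the Bytes helpers with memcpy:

    #include <cassert>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Illustration only: stand-ins for the ConstantPool encode/decode pair.
    // The real encoding is an implementation detail; the complement here just
    // makes encoded indices negative and therefore easy to tell apart.
    static int32_t encode_idx(int32_t i) { return ~i; }
    static int32_t decode_idx(int32_t v) { return ~v; }

    // Stand-ins for Bytes::put_native_u4 / get_native_u4: the operand is
    // stored in native byte order, unlike the Java-order u2 of other rewrites.
    static void put_native_u4(uint8_t* p, uint32_t v) { memcpy(p, &v, 4); }
    static uint32_t get_native_u4(const uint8_t* p) {
      uint32_t v; memcpy(&v, p, 4); return v;
    }

    int main() {
      // Fake 5-byte invokedynamic instruction: opcode plus 4 operand bytes.
      uint8_t insn[5] = { 0xba /* invokedynamic */, 0, 0, 0, 0 };
      int cache_index = 70000;                 // deliberately too big for a u2

      // forward rewrite: store the encoded call-site index
      put_native_u4(&insn[1], (uint32_t)encode_idx(cache_index));
      // reverse rewrite: read it back and decode
      int back = decode_idx((int32_t)get_native_u4(&insn[1]));
      assert(back == cache_index);
      printf("round-tripped call-site index %d\n", back);
      return 0;
    }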
@@ -229,29 +245,29 @@
   if (!reverse) {
     assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
     address p = bcp + offset;
     int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
     constantTag tag = _pool->tag_at(cp_index).value();
-    if (tag.is_method_handle() || tag.is_method_type()) {
-      int cache_index = cp_entry_to_cp_cache(cp_index);
+    if (tag.is_method_handle() || tag.is_method_type() || tag.is_string() || tag.is_object()) {
+      int ref_index = cp_entry_to_resolved_references(cp_index);
       if (is_wide) {
         (*bcp) = Bytecodes::_fast_aldc_w;
-        assert(cache_index == (u2)cache_index, "index overflow");
-        Bytes::put_native_u2(p, cache_index);
+        assert(ref_index == (u2)ref_index, "index overflow");
+        Bytes::put_native_u2(p, ref_index);
       } else {
         (*bcp) = Bytecodes::_fast_aldc;
-        assert(cache_index == (u1)cache_index, "index overflow");
-        (*p) = (u1)cache_index;
+        assert(ref_index == (u1)ref_index, "index overflow");
+        (*p) = (u1)ref_index;
       }
     }
   } else {
     Bytecodes::Code rewritten_bc =
           (is_wide ? Bytecodes::_fast_aldc_w : Bytecodes::_fast_aldc);
     if ((*bcp) == rewritten_bc) {
       address p = bcp + offset;
-      int cache_index = is_wide ? Bytes::get_native_u2(p) : (u1)(*p);
-      int pool_index = cp_cache_entry_pool_index(cache_index);
+      int ref_index = is_wide ? Bytes::get_native_u2(p) : (u1)(*p);
+      int pool_index = resolved_references_entry_to_pool_index(ref_index);
       if (is_wide) {
         (*bcp) = Bytecodes::_ldc_w;
         assert(pool_index == (u2)pool_index, "index overflow");
         Bytes::put_Java_u2(p, pool_index);
       } else {
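In the hunk above, ldc of a String or Object constant (in addition to MethodHandle/MethodType) is now rewritten to _fast_aldc/_fast_aldc_w, and the operand becomes an index into the resolved-references array rather than a cp-cache index; the reverse path restores the original opcode and constant-pool index. A small self-contained sketch of that narrow/wide operand patching follows; the opcode values and helpers are stand-ins, not the HotSpot Bytecodes/Bytes definitions, and the restored cp index is hard-coded where the real code consults its reverse map:

    #include <cassert>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Stand-in opcode values for illustration; real values live in Bytecodes.
    enum { LDC = 0x12, LDC_W = 0x13, FAST_ALDC = 0xe4, FAST_ALDC_W = 0xe5 };

    // Java order (big-endian) u2, as used for the original ldc_w operand.
    static void put_Java_u2(uint8_t* p, uint16_t v) { p[0] = v >> 8; p[1] = v & 0xff; }
    static uint16_t get_Java_u2(const uint8_t* p) { return (uint16_t)((p[0] << 8) | p[1]); }
    // Native order u2, as used for the rewritten _fast_aldc_w operand.
    static void put_native_u2(uint8_t* p, uint16_t v) { memcpy(p, &v, 2); }
    static uint16_t get_native_u2(const uint8_t* p) { uint16_t v; memcpy(&v, p, 2); return v; }

    int main() {
      // A wide ldc_w referring to cp index 300; the rewrite maps it to a
      // (hypothetical) resolved-references index 7.
      uint8_t bc[3];
      bc[0] = LDC_W;
      put_Java_u2(&bc[1], 300);

      // forward rewrite: ldc_w -> _fast_aldc_w, operand becomes ref_index
      uint16_t ref_index = 7;
      bc[0] = FAST_ALDC_W;
      put_native_u2(&bc[1], ref_index);

      // reverse rewrite (e.g. on error): restore opcode and cp index
      assert(bc[0] == FAST_ALDC_W);
      uint16_t back_ref = get_native_u2(&bc[1]);
      uint16_t cp_index = 300;          // would come from the reverse map
      bc[0] = LDC_W;
      put_Java_u2(&bc[1], cp_index);
      printf("ref_index %u restored to cp index %u\n",
             (unsigned)back_ref, (unsigned)get_Java_u2(&bc[1]));
      return 0;
    }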
@@ -263,18 +279,18 @@
   }
 }


 // Rewrites a method given the index_map information
-void Rewriter::scan_method(methodOop method, bool reverse) {
+void Rewriter::scan_method(Method* method, bool reverse) {

   int nof_jsrs = 0;
   bool has_monitor_bytecodes = false;

   {
     // We cannot tolerate a GC in this block, because we've
-    // cached the bytecodes in 'code_base'. If the methodOop
+    // cached the bytecodes in 'code_base'. If the Method*
     // moves, the bytecodes will also move.
     No_Safepoint_Verifier nsv;
     Bytecodes::Code c;

     // Bytecodes and their length
@@ -375,20 +391,10 @@
 methodHandle Rewriter::rewrite_jsrs(methodHandle method, TRAPS) {
   ResourceMark rm(THREAD);
   ResolveOopMapConflicts romc(method);
   methodHandle original_method = method;
   method = romc.do_potential_rewrite(CHECK_(methodHandle()));
-  if (method() != original_method()) {
-    // Insert invalid bytecode into original methodOop and set
-    // interpreter entrypoint, so that a executing this method
-    // will manifest itself in an easy recognizable form.
-    address bcp = original_method->bcp_from(0);
-    *bcp = (u1)Bytecodes::_shouldnotreachhere;
-    int kind = Interpreter::method_kind(original_method);
-    original_method->set_interpreter_kind(kind);
-  }
-
   // Update monitor matching info.
   if (romc.monitor_safe()) {
     method->set_guaranteed_monitor_matching();
   }

@@ -400,32 +406,32 @@
   Rewriter rw(klass, klass->constants(), klass->methods(), CHECK);
   // (That's all, folks.)
 }


-void Rewriter::rewrite(instanceKlassHandle klass, constantPoolHandle cpool, objArrayHandle methods, TRAPS) {
+void Rewriter::rewrite(instanceKlassHandle klass, constantPoolHandle cpool, Array<Method*>* methods, TRAPS) {
   ResourceMark rm(THREAD);
   Rewriter rw(klass, cpool, methods, CHECK);
   // (That's all, folks.)
 }


-Rewriter::Rewriter(instanceKlassHandle klass, constantPoolHandle cpool, objArrayHandle methods, TRAPS)
+Rewriter::Rewriter(instanceKlassHandle klass, constantPoolHandle cpool, Array<Method*>* methods, TRAPS)
   : _klass(klass),
     _pool(cpool),
     _methods(methods)
 {
   assert(_pool->cache() == NULL, "constant pool cache must not be set yet");

-  // determine index maps for methodOop rewriting
+  // determine index maps for Method* rewriting
   compute_index_maps();

   if (RegisterFinalizersAtInit && _klass->name() == vmSymbols::java_lang_Object()) {
     bool did_rewrite = false;
     int i = _methods->length();
     while (i-- > 0) {
-      methodOop method = (methodOop)_methods->obj_at(i);
+      Method* method = _methods->at(i);
       if (method->intrinsic_id() == vmIntrinsics::_Object_init) {
         // rewrite the return bytecodes of Object.<init> to register the
         // object for finalization if needed.
         methodHandle m(THREAD, method);
         rewrite_Object_init(m, CHECK);
@@ -438,11 +444,11 @@

   // rewrite methods, in two passes
   int len = _methods->length();

   for (int i = len-1; i >= 0; i--) {
-    methodOop method = (methodOop)_methods->obj_at(i);
+    Method* method = _methods->at(i);
     scan_method(method);
   }

   // allocate constant pool cache, now that we've seen all the bytecodes
   make_constant_pool_cache(THREAD);
@@ -459,24 +465,23 @@
 // stage because it can throw other exceptions, leaving the bytecodes
 // pointing at constant pool cache entries.
 // Link and check jvmti dependencies while we're iterating over the methods.
 // JSR292 code calls with a different set of methods, so two entry points.
 void Rewriter::relocate_and_link(instanceKlassHandle this_oop, TRAPS) {
-  objArrayHandle methods(THREAD, this_oop->methods());
-  relocate_and_link(this_oop, methods, THREAD);
+  relocate_and_link(this_oop, this_oop->methods(), THREAD);
 }

 void Rewriter::relocate_and_link(instanceKlassHandle this_oop,
-                                 objArrayHandle methods, TRAPS) {
+                                 Array<Method*>* methods, TRAPS) {
   int len = methods->length();
   for (int i = len-1; i >= 0; i--) {
-    methodHandle m(THREAD, (methodOop)methods->obj_at(i));
+    methodHandle m(THREAD, methods->at(i));

     if (m->has_jsrs()) {
       m = rewrite_jsrs(m, CHECK);
       // Method might have gotten rewritten.
-      methods->obj_at_put(i, m());
+      methods->at_put(i, m());
     }

     // Set up method entry points for compiler and interpreter .
     m->link_method(m, CHECK);

@@ -485,11 +490,11 @@
     if (StressMethodComparator) {
       static int nmc = 0;
       for (int j = i; j >= 0 && j >= i-4; j--) {
         if ((++nmc % 1000) == 0)  tty->print_cr("Have run MethodComparator %d times...", nmc);
         bool z = MethodComparator::methods_EMCP(m(),
-                   (methodOop)methods->obj_at(j));
+                   methods->at(j));
         if (j == i && !z) {
           tty->print("MethodComparator FAIL: "); m->print(); m->print_codes();
           assert(z, "method must compare equal to itself");
         }
       }