Mercurial > hg > truffle
annotate src/share/vm/runtime/jniHandles.cpp @ 7666:31540ca73e81
Remove ControlFlowException in SimpleLanguage.
author | Thomas Wuerthinger <thomas.wuerthinger@oracle.com> |
---|---|
date | Fri, 01 Feb 2013 19:53:52 +0100 |
parents | f34d701e952e |
children | 001ec9515f84 |
rev | line source |
---|---|
0 | 1 /* |
6725
da91efe96a93
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
3960
diff
changeset
|
2 * Copyright (c) 1998, 2012, Oracle and/or its affiliates. All rights reserved. |
0 | 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 * | |
5 * This code is free software; you can redistribute it and/or modify it | |
6 * under the terms of the GNU General Public License version 2 only, as | |
7 * published by the Free Software Foundation. | |
8 * | |
9 * This code is distributed in the hope that it will be useful, but WITHOUT | |
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
12 * version 2 for more details (a copy is included in the LICENSE file that | |
13 * accompanied this code). | |
14 * | |
15 * You should have received a copy of the GNU General Public License version | |
16 * 2 along with this work; if not, write to the Free Software Foundation, | |
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. | |
18 * | |
1552
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1142
diff
changeset
|
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1142
diff
changeset
|
20 * or visit www.oracle.com if you need additional information or have any |
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1142
diff
changeset
|
21 * questions. |
0 | 22 * |
23 */ | |
24 | |
1972 | 25 #include "precompiled.hpp" |
26 #include "classfile/systemDictionary.hpp" | |
27 #include "oops/oop.inline.hpp" | |
2147
9afee0b9fc1d
7012505: BreakpointWithFullGC.sh fails with Internal Error (src/share/vm/oops/methodOop.cpp:220)
kamg
parents:
2125
diff
changeset
|
28 #include "prims/jvmtiExport.hpp" |
1972 | 29 #include "runtime/jniHandles.hpp" |
30 #include "runtime/mutexLocker.hpp" | |
7180
f34d701e952e
8003935: Simplify the needed includes for using Thread::current()
stefank
parents:
6725
diff
changeset
|
31 #include "runtime/thread.inline.hpp" |
0 | 32 |
33 | |
// Roots for JNI global and weak-global references, plus the sentinel oop
// stored into deleted handle slots so they can be recognized and reused.
JNIHandleBlock* JNIHandles::_global_handles       = NULL;
JNIHandleBlock* JNIHandles::_weak_global_handles  = NULL;
oop             JNIHandles::_deleted_handle       = NULL;
37 | |
38 | |
39 jobject JNIHandles::make_local(oop obj) { | |
40 if (obj == NULL) { | |
41 return NULL; // ignore null handles | |
42 } else { | |
43 Thread* thread = Thread::current(); | |
44 assert(Universe::heap()->is_in_reserved(obj), "sanity check"); | |
45 return thread->active_handles()->allocate_handle(obj); | |
46 } | |
47 } | |
48 | |
49 | |
50 // optimized versions | |
51 | |
52 jobject JNIHandles::make_local(Thread* thread, oop obj) { | |
53 if (obj == NULL) { | |
54 return NULL; // ignore null handles | |
55 } else { | |
56 assert(Universe::heap()->is_in_reserved(obj), "sanity check"); | |
57 return thread->active_handles()->allocate_handle(obj); | |
58 } | |
59 } | |
60 | |
61 | |
62 jobject JNIHandles::make_local(JNIEnv* env, oop obj) { | |
63 if (obj == NULL) { | |
64 return NULL; // ignore null handles | |
65 } else { | |
66 JavaThread* thread = JavaThread::thread_from_jni_environment(env); | |
67 assert(Universe::heap()->is_in_reserved(obj), "sanity check"); | |
68 return thread->active_handles()->allocate_handle(obj); | |
69 } | |
70 } | |
71 | |
72 | |
// Allocate a global JNI handle for obj.  Global handles are strong GC roots
// shared by all threads, so allocation is serialized on JNIGlobalHandle_lock.
// Returns NULL for a null Handle.
jobject JNIHandles::make_global(Handle obj) {
  // Allocating a handle extends the root set, which must not race with a GC
  // that is scanning it.
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _global_handles->allocate_handle(obj());
  } else {
    // In CHECK_UNHANDLED_OOPS builds, discard the tracking entry for the
    // (null) oop we were passed.
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }

  return res;
}
87 | |
88 | |
// Allocate a weak global JNI handle for obj.  Weak globals do not keep the
// referent alive; dead referents are cleared in weak_oops_do.  Note that the
// same JNIGlobalHandle_lock guards both the global and weak-global blocks.
jobject JNIHandles::make_weak_global(Handle obj) {
  assert(!Universe::heap()->is_gc_active(), "can't extend the root set during GC");
  jobject res = NULL;
  if (!obj.is_null()) {
    // ignore null handles
    MutexLocker ml(JNIGlobalHandle_lock);
    assert(Universe::heap()->is_in_reserved(obj()), "sanity check");
    res = _weak_global_handles->allocate_handle(obj());
  } else {
    // See make_global: drop unhandled-oop tracking for the null oop.
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return res;
}
102 | |
103 | |
104 void JNIHandles::destroy_global(jobject handle) { | |
105 if (handle != NULL) { | |
106 assert(is_global_handle(handle), "Invalid delete of global JNI handle"); | |
107 *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it | |
108 } | |
109 } | |
110 | |
111 | |
112 void JNIHandles::destroy_weak_global(jobject handle) { | |
113 if (handle != NULL) { | |
114 assert(!CheckJNICalls || is_weak_global_handle(handle), "Invalid delete of weak global JNI handle"); | |
115 *((oop*)handle) = deleted_handle(); // Mark the handle as deleted, allocate will reuse it | |
116 } | |
117 } | |
118 | |
119 | |
// Apply f to all strong JNI roots: the deleted-handle sentinel itself (it is
// a heap object and must be kept alive/updated) plus every global handle.
void JNIHandles::oops_do(OopClosure* f) {
  f->do_oop(&_deleted_handle);
  _global_handles->oops_do(f);
}
124 | |
125 | |
// Apply f to weak global handles whose referents is_alive reports live;
// dead referents are cleared (see JNIHandleBlock::weak_oops_do).
void JNIHandles::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  _weak_global_handles->weak_oops_do(is_alive, f);
}
129 | |
130 | |
// One-time VM-startup initialization: create the global and weak-global
// blocks and allocate the java.lang.Object instance used as the
// deleted-handle sentinel.
void JNIHandles::initialize() {
  _global_handles      = JNIHandleBlock::allocate_block();
  _weak_global_handles = JNIHandleBlock::allocate_block();
  EXCEPTION_MARK;
  // We will never reach the CATCH below since Exceptions::_throw will cause
  // the VM to exit if an exception is thrown during initialization
  Klass* k = SystemDictionary::Object_klass();
  _deleted_handle = InstanceKlass::cast(k)->allocate_instance(CATCH);
}
140 | |
141 | |
// Return true if handle lives in any of the given thread's local handle
// blocks, including frames pushed by jni_PushLocalFrame.
bool JNIHandles::is_local_handle(Thread* thread, jobject handle) {
  JNIHandleBlock* block = thread->active_handles();

  // Look back past possible native calls to jni_PushLocalFrame.
  while (block != NULL) {
    if (block->chain_contains(handle)) {
      return true;
    }
    block = block->pop_frame_link();
  }
  return false;
}
154 | |
155 | |
// Determine if the handle is somewhere in the current thread's stack.
// We can't easily isolate any particular stack frame the handle might
// come from, so we'll check the whole stack.

bool JNIHandles::is_frame_handle(JavaThread* thr, jobject obj) {
  // If there is no java frame, then this must be top level code, such
  // as the java command executable, in which case, this type of handle
  // is not permitted.
  // Otherwise accept any address between the last Java sp and the stack
  // base (stacks grow down, so base is the high end).
  return (thr->has_last_Java_frame() &&
         (void*)obj < (void*)thr->stack_base() &&
         (void*)obj >= (void*)thr->last_Java_sp());
}
168 | |
169 | |
// True if handle lives in the global handle block chain (linear scan).
bool JNIHandles::is_global_handle(jobject handle) {
  return _global_handles->chain_contains(handle);
}
173 | |
174 | |
// True if handle lives in the weak-global handle block chain (linear scan).
bool JNIHandles::is_weak_global_handle(jobject handle) {
  return _weak_global_handles->chain_contains(handle);
}
178 | |
// Bytes occupied by the global handle blocks (block count * block size).
long JNIHandles::global_handle_memory_usage() {
  return _global_handles->memory_usage();
}
182 | |
// Bytes occupied by the weak-global handle blocks.
long JNIHandles::weak_global_handle_memory_usage() {
  return _weak_global_handles->memory_usage();
}
186 | |
187 | |
// Liveness closure that treats every object as alive; used to walk weak
// handles without clearing any of them (printing, verification).
class AlwaysAliveClosure: public BoolObjectClosure {
public:
  bool do_object_b(oop obj) { return true; }
  void do_object(oop obj) { assert(false, "Don't call"); }
};
193 | |
// Oop closure that simply counts the roots it is applied to; used by
// print_on to report the number of JNI references.
class CountHandleClosure: public OopClosure {
private:
  int _count;
public:
  CountHandleClosure(): _count(0) {}
  virtual void do_oop(oop* unused) {
    _count++;
  }
  // JNI handle blocks store full-width oops only, never narrow oops.
  virtual void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
  int count() { return _count; }
};
205 | |
// We assume this is called at a safepoint: no lock is needed.
// Print the total number of global + weak-global JNI references to st.
void JNIHandles::print_on(outputStream* st) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
  assert(_global_handles != NULL && _weak_global_handles != NULL,
         "JNIHandles not initialized");

  CountHandleClosure global_handle_count;
  AlwaysAliveClosure always_alive;
  oops_do(&global_handle_count);
  // Count weak handles too; AlwaysAliveClosure keeps them from being cleared.
  weak_oops_do(&always_alive, &global_handle_count);

  st->print_cr("JNI global references: %d", global_handle_count.count());
  st->cr();
  st->flush();
}
221 | |
// Oop closure that runs the oop verifier on every root it visits;
// used by JNIHandles::verify.
class VerifyHandleClosure: public OopClosure {
public:
  virtual void do_oop(oop* root) {
    (*root)->verify();
  }
  // JNI handle blocks store full-width oops only, never narrow oops.
  virtual void do_oop(narrowOop* root) { ShouldNotReachHere(); }
};
229 | |
230 void JNIHandles::verify() { | |
231 VerifyHandleClosure verify_handle; | |
232 AlwaysAliveClosure always_alive; | |
233 | |
234 oops_do(&verify_handle); | |
235 weak_oops_do(&always_alive, &verify_handle); | |
236 } | |
237 | |
238 | |
239 | |
// VM init-time entry point (called from the startup sequence).
void jni_handles_init() {
  JNIHandles::initialize();
}
243 | |
244 | |
// Global block bookkeeping: total blocks ever allocated, the shared free
// list, and (debug builds only) a list of every block for any_contains /
// print_statistics.
int             JNIHandleBlock::_blocks_allocated = 0;
JNIHandleBlock* JNIHandleBlock::_block_free_list  = NULL;
#ifndef PRODUCT
JNIHandleBlock* JNIHandleBlock::_block_list       = NULL;
#endif
250 | |
251 | |
252 void JNIHandleBlock::zap() { | |
253 // Zap block values | |
254 _top = 0; | |
255 for (int index = 0; index < block_size_in_oops; index++) { | |
256 _handles[index] = badJNIHandle; | |
257 } | |
258 } | |
259 | |
// Hand out a fresh (empty) block, preferring the thread-local free list,
// then the global free list, then heap allocation.  thread may be NULL
// (e.g. when allocating the global/weak-global roots at startup).
JNIHandleBlock* JNIHandleBlock::allocate_block(Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* block;
  // Check the thread-local free list for a block so we don't
  // have to acquire a mutex.
  if (thread != NULL && thread->free_handle_block() != NULL) {
    block = thread->free_handle_block();
    thread->set_free_handle_block(block->_next);
  }
  else {
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    if (_block_free_list == NULL) {
      // Allocate new block
      block = new JNIHandleBlock();
      _blocks_allocated++;
      if (TraceJNIHandleAllocation) {
        tty->print_cr("JNIHandleBlock " INTPTR_FORMAT " allocated (%d total blocks)",
                      block, _blocks_allocated);
      }
      if (ZapJNIHandleArea) block->zap();
      #ifndef PRODUCT
      // Link new block to list of all allocated blocks
      block->_block_list_link = _block_list;
      _block_list = block;
      #endif
    } else {
      // Get block from free list
      block = _block_free_list;
      _block_free_list = _block_free_list->_next;
    }
  }
  // Reinitialize the block regardless of where it came from.
  block->_top  = 0;
  block->_next = NULL;
  block->_pop_frame_link = NULL;
  // _last, _free_list & _allocate_before_rebuild initialized in allocate_handle
  debug_only(block->_last = NULL);
  debug_only(block->_free_list = NULL);
  debug_only(block->_allocate_before_rebuild = -1);
  return block;
}
305 | |
306 | |
// Return a chain of blocks to a free list: the thread-local one when a
// thread is given, the global one otherwise.  Blocks reachable through the
// pop_frame_link (unbalanced PushLocalFrame) are released recursively.
void JNIHandleBlock::release_block(JNIHandleBlock* block, Thread* thread) {
  assert(thread == NULL || thread == Thread::current(), "sanity check");
  JNIHandleBlock* pop_frame_link = block->pop_frame_link();
  // Put returned block at the beginning of the thread-local free list.
  // Note that if thread == NULL, we use it as an implicit argument that
  // we _don't_ want the block to be kept on the free_handle_block.
  // See for instance JavaThread::exit().
  if (thread != NULL ) {
    if (ZapJNIHandleArea) block->zap();
    JNIHandleBlock* freelist = thread->free_handle_block();
    block->_pop_frame_link = NULL;
    thread->set_free_handle_block(block);

    // Add original freelist to end of chain
    if ( freelist != NULL ) {
      while ( block->_next != NULL ) block = block->_next;
      block->_next = freelist;
    }
    // Handled; skip the global-freelist path below.
    block = NULL;
  }
  if (block != NULL) {
    // Return blocks to free list
    // locking with safepoint checking introduces a potential deadlock:
    // - we would hold JNIHandleBlockFreeList_lock and then Threads_lock
    // - another would hold Threads_lock (jni_AttachCurrentThread) and then
    //   JNIHandleBlockFreeList_lock (JNIHandleBlock::allocate_block)
    MutexLockerEx ml(JNIHandleBlockFreeList_lock,
                     Mutex::_no_safepoint_check_flag);
    while (block != NULL) {
      if (ZapJNIHandleArea) block->zap();
      JNIHandleBlock* next = block->_next;
      block->_next = _block_free_list;
      _block_free_list = block;
      block = next;
    }
  }
  if (pop_frame_link != NULL) {
    // As a sanity check we release blocks pointed to by the pop_frame_link.
    // This should never happen (only if PopLocalFrame is not called the
    // correct number of times).
    release_block(pop_frame_link, thread);
  }
}
350 | |
351 | |
// Apply f to every live handle slot in this chain of blocks, and in the
// chains hanging off pop_frame_link (frames from jni_PushLocalFrame).
void JNIHandleBlock::oops_do(OopClosure* f) {
  JNIHandleBlock* current_chain = this;
  // Iterate over chain of blocks, followed by chains linked through the
  // pop frame links.
  while (current_chain != NULL) {
    for (JNIHandleBlock* current = current_chain; current != NULL;
         current = current->_next) {
      assert(current == current_chain || current->pop_frame_link() == NULL,
        "only blocks first in chain should have pop frame link set");
      for (int index = 0; index < current->_top; index++) {
        oop* root = &(current->_handles)[index];
        oop value = *root;
        // traverse heap pointers only, not deleted handles or free list
        // pointers
        if (value != NULL && Universe::heap()->is_in_reserved(value)) {
          f->do_oop(root);
        }
      }
      // the next handle block is valid only if current block is full
      if (current->_top < block_size_in_oops) {
        break;
      }
    }
    current_chain = current_chain->pop_frame_link();
  }
}
378 | |
379 | |
// Walk every weak handle in this chain: apply f to slots whose referent
// is_alive reports live, clear (NULL) the rest.  Also iterates JVMTI's
// weak oops so individual collectors don't each have to.
void JNIHandleBlock::weak_oops_do(BoolObjectClosure* is_alive,
                                  OopClosure* f) {
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    assert(current->pop_frame_link() == NULL,
      "blocks holding weak global JNI handles should not have pop frame link set");
    for (int index = 0; index < current->_top; index++) {
      oop* root = &(current->_handles)[index];
      oop value = *root;
      // traverse heap pointers only, not deleted handles or free list pointers
      if (value != NULL && Universe::heap()->is_in_reserved(value)) {
        if (is_alive->do_object_b(value)) {
          // The weakly referenced object is alive, update pointer
          f->do_oop(root);
        } else {
          // The weakly referenced object is not alive, clear the reference by storing NULL
          if (TraceReferenceGC) {
            tty->print_cr("Clearing JNI weak reference (" INTPTR_FORMAT ")", root);
          }
          *root = NULL;
        }
      }
    }
    // the next handle block is valid only if current block is full
    if (current->_top < block_size_in_oops) {
      break;
    }
  }

  /*
   * JVMTI data structures may also contain weak oops.  The iteration of them
   * is placed here so that we don't need to add it to each of the collectors.
   */
  JvmtiExport::weak_oops_do(is_alive, f);
}
414 | |
415 | |
// Store obj into a free slot of this block chain and return the slot's
// address as a jobject.  Tries, in order: the current last block, the
// free list of deleted slots, any pre-allocated trailing block; otherwise
// rebuilds the free list or appends a new block, then retries.
jobject JNIHandleBlock::allocate_handle(oop obj) {
  assert(Universe::heap()->is_in_reserved(obj), "sanity check");
  if (_top == 0) {
    // This is the first allocation or the initial block got zapped when
    // entering a native function. If we have any following blocks they are
    // not valid anymore.
    for (JNIHandleBlock* current = _next; current != NULL;
         current = current->_next) {
      assert(current->_last == NULL, "only first block should have _last set");
      assert(current->_free_list == NULL,
        "only first block should have _free_list set");
      current->_top = 0;
      if (ZapJNIHandleArea) current->zap();
    }
    // Clear initial block
    _free_list = NULL;
    _allocate_before_rebuild = 0;
    _last = this;
    if (ZapJNIHandleArea) zap();
  }

  // Try last block
  if (_last->_top < block_size_in_oops) {
    oop* handle = &(_last->_handles)[_last->_top++];
    *handle = obj;
    return (jobject) handle;
  }

  // Try free list
  if (_free_list != NULL) {
    oop* handle = _free_list;
    // Free-list slots store the next free-slot pointer in place of an oop.
    _free_list = (oop*) *_free_list;
    *handle = obj;
    return (jobject) handle;
  }
  // Check if unused block follow last
  if (_last->_next != NULL) {
    // update last and retry
    _last = _last->_next;
    return allocate_handle(obj);
  }

  // No space available, we have to rebuild free list or expand
  if (_allocate_before_rebuild == 0) {
      rebuild_free_list();        // updates _allocate_before_rebuild counter
  } else {
    // Append new block
    Thread* thread = Thread::current();
    Handle obj_handle(thread, obj);
    // This can block, so we need to preserve obj across call.
    _last->_next = JNIHandleBlock::allocate_block(thread);
    _last = _last->_next;
    _allocate_before_rebuild--;
    obj = obj_handle();
  }
  return allocate_handle(obj);  // retry
}
473 | |
474 | |
// Collect all deleted-handle slots in the (full) chain into _free_list.
// If fewer than half of all slots were free, schedule a corresponding
// number of new-block appends before the next rebuild attempt.
void JNIHandleBlock::rebuild_free_list() {
  assert(_allocate_before_rebuild == 0 && _free_list == NULL, "just checking");
  int free = 0;
  int blocks = 0;
  for (JNIHandleBlock* current = this; current != NULL; current = current->_next) {
    for (int index = 0; index < current->_top; index++) {
      oop* handle = &(current->_handles)[index];
      if (*handle ==  JNIHandles::deleted_handle()) {
        // this handle was cleared out by a delete call, reuse it
        // chain the slot into the free list by storing the old head in it
        *handle = (oop) _free_list;
        _free_list = handle;
        free++;
      }
    }
    // we should not rebuild free list if there are unused handles at the end
    assert(current->_top == block_size_in_oops, "just checking");
    blocks++;
  }
  // Heuristic: if more than half of the handles are free we rebuild next time
  // as well, otherwise we append a corresponding number of new blocks before
  // attempting a free list rebuild again.
  int total = blocks * block_size_in_oops;
  int extra = total - 2*free;
  if (extra > 0) {
    // Not as many free handles as we would like - compute number of new blocks to append
    _allocate_before_rebuild = (extra + block_size_in_oops - 1) / block_size_in_oops;
  }
  if (TraceJNIHandleAllocation) {
    tty->print_cr("Rebuild free list JNIHandleBlock " INTPTR_FORMAT " blocks=%d used=%d free=%d add=%d",
      this, blocks, total-free, free, _allocate_before_rebuild);
  }
}
507 | |
508 | |
509 bool JNIHandleBlock::contains(jobject handle) const { | |
510 return ((jobject)&_handles[0] <= handle && handle<(jobject)&_handles[_top]); | |
511 } | |
512 | |
513 | |
514 bool JNIHandleBlock::chain_contains(jobject handle) const { | |
515 for (JNIHandleBlock* current = (JNIHandleBlock*) this; current != NULL; current = current->_next) { | |
516 if (current->contains(handle)) { | |
517 return true; | |
518 } | |
519 } | |
520 return false; | |
521 } | |
522 | |
523 | |
524 int JNIHandleBlock::length() const { | |
525 int result = 1; | |
526 for (JNIHandleBlock* current = _next; current != NULL; current = current->_next) { | |
527 result++; | |
528 } | |
529 return result; | |
530 } | |
531 | |
// This method is not thread-safe, i.e., must be called while holding a lock on the
// structure.
// Returns the chain's footprint in bytes (block count * sizeof(block)).
long JNIHandleBlock::memory_usage() const {
  return length() * sizeof(JNIHandleBlock);
}
537 | |
538 | |
539 #ifndef PRODUCT | |
540 | |
541 bool JNIHandleBlock::any_contains(jobject handle) { | |
542 for (JNIHandleBlock* current = _block_list; current != NULL; current = current->_block_list_link) { | |
543 if (current->contains(handle)) { | |
544 return true; | |
545 } | |
546 } | |
547 return false; | |
548 } | |
549 | |
// Debug-only: dump block/handle usage statistics for every block ever
// allocated (walks the global _block_list).
void JNIHandleBlock::print_statistics() {
  int used_blocks = 0;
  int free_blocks = 0;
  int used_handles = 0;
  int free_handles = 0;
  JNIHandleBlock* block = _block_list;
  while (block != NULL) {
    // A block with _top == 0 holds no handles and counts as free.
    if (block->_top > 0) {
      used_blocks++;
    } else {
      free_blocks++;
    }
    used_handles += block->_top;
    free_handles += (block_size_in_oops - block->_top);
    block = block->_block_list_link;
  }
  tty->print_cr("JNIHandleBlocks statistics");
  tty->print_cr("- blocks allocated: %d", used_blocks + free_blocks);
  tty->print_cr("- blocks in use:    %d", used_blocks);
  tty->print_cr("- blocks free:      %d", free_blocks);
  tty->print_cr("- handles in use:   %d", used_handles);
  tty->print_cr("- handles free:     %d", free_handles);
}
573 | |
574 #endif |