comparison src/share/vm/memory/generation.cpp @ 0:a61af66fc99e jdk7-b24

Initial load
author duke
date Sat, 01 Dec 2007 00:00:00 +0000
parents
children ba764ed4b6f2
comparison
equal deleted inserted replaced
-1:000000000000 0:a61af66fc99e
1 /*
2 * Copyright 1997-2006 Sun Microsystems, Inc. All Rights Reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
20 * CA 95054 USA or visit www.sun.com if you need additional information or
21 * have any questions.
22 *
23 */
24
25 # include "incls/_precompiled.incl"
26 # include "incls/_generation.cpp.incl"
27
// Construct a generation occupying (part of) the reserved space "rs",
// committing "initial_size" bytes up front.  Exits the VM if the
// initial commit fails.  The reference processor is created lazily by
// ref_processor_init(), not here.
Generation::Generation(ReservedSpace rs, size_t initial_size, int level) :
  _level(level),
  _ref_processor(NULL) {
  if (!_virtual_space.initialize(rs, initial_size)) {
    vm_exit_during_initialization("Could not reserve enough space for "
                                  "object heap");
  }
  // _reserved covers the whole reservation (low to high boundary),
  // not just the initially committed portion.
  _reserved = MemRegion((HeapWord*)_virtual_space.low_boundary(),
                        (HeapWord*)_virtual_space.high_boundary());
}
38
// Look up this generation's specification in the collected heap's
// spec table, indexed by generation level.
GenerationSpec* Generation::spec() {
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  assert(0 <= level() && level() < gch->_n_gens, "Bad gen level");
  return gch->_gen_specs[level()];
}

// Maximum capacity is the full reserved (not just committed) size.
size_t Generation::max_capacity() const {
  return reserved().byte_size();
}
48
// Print a "used-before -> used-after (capacity)" triple for GC logging.
// With -XX:+PrintGCDetails -XX:+Verbose the raw byte values are printed;
// otherwise values are scaled to kilobytes.
void Generation::print_heap_change(size_t prev_used) const {
  if (PrintGCDetails && Verbose) {
    gclog_or_tty->print(" "  SIZE_FORMAT
                        "->" SIZE_FORMAT
                        "("  SIZE_FORMAT ")",
                        prev_used, used(), capacity());
  } else {
    gclog_or_tty->print(" "  SIZE_FORMAT "K"
                        "->" SIZE_FORMAT "K"
                        "("  SIZE_FORMAT "K)",
                        prev_used / K, used() / K, capacity() / K);
  }
}
62
// By default we get a single threaded default reference processor;
// generations needing multi-threaded refs discovery override this method.
void Generation::ref_processor_init() {
  assert(_ref_processor == NULL, "a reference processor already exists");
  assert(!_reserved.is_empty(), "empty generation?");
  // Discovery spans the whole reserved region of this generation;
  // atomicity and MT-ness are determined by the (virtual) policy hooks.
  _ref_processor =
    new ReferenceProcessor(_reserved,                  // span
                           refs_discovery_is_atomic(), // atomic_discovery
                           refs_discovery_is_mt());    // mt_discovery
  if (_ref_processor == NULL) {
    vm_exit_during_initialization("Could not allocate ReferenceProcessor object");
  }
}
76
// Convenience wrapper: print to the default tty stream.
void Generation::print() const { print_on(tty); }

// One-line summary: name, total/used sizes in K, and the virtual-space
// bounds as [low_boundary, high, high_boundary).
void Generation::print_on(outputStream* st) const {
  st->print(" %-20s", name());
  st->print(" total " SIZE_FORMAT "K, used " SIZE_FORMAT "K",
             capacity()/K, used()/K);
  st->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ", " INTPTR_FORMAT ")",
              _virtual_space.low_boundary(),
              _virtual_space.high(),
              _virtual_space.high_boundary());
}
88
// Convenience wrapper: print accumulated GC stats to the default tty.
void Generation::print_summary_info() { print_summary_info_on(tty); }

// Print accumulated GC time, invocation count, and average time per GC
// for this generation.  The ternary guards against division by zero
// when no collections have occurred yet.
void Generation::print_summary_info_on(outputStream* st) {
  StatRecord* sr = stat_record();
  double time = sr->accumulated_time.seconds();
  st->print_cr("[Accumulated GC generation %d time %3.7f secs, "
               "%d GC's, avg GC time %3.7f]",
               level(), time, sr->invocations,
               sr->invocations > 0 ? time / sr->invocations : 0.0);
}
99
// Utility iterator classes

// Records the first space whose *reserved* region contains _p.
class GenerationIsInReservedClosure : public SpaceClosure {
 public:
  const void* _p;
  Space* sp;
  virtual void do_space(Space* s) {
    if (sp == NULL) {
      // Once a space is found, later spaces are ignored.
      if (s->is_in_reserved(_p)) sp = s;
    }
  }
  GenerationIsInReservedClosure(const void* p) : _p(p), sp(NULL) {}
};

// Records the first space whose *allocated* region contains _p.
class GenerationIsInClosure : public SpaceClosure {
 public:
  const void* _p;
  Space* sp;
  virtual void do_space(Space* s) {
    if (sp == NULL) {
      if (s->is_in(_p)) sp = s;
    }
  }
  GenerationIsInClosure(const void* p) : _p(p), sp(NULL) {}
};

// True if "p" lies within the allocated portion of any of this
// generation's spaces.
bool Generation::is_in(const void* p) const {
  GenerationIsInClosure blk(p);
  ((Generation*)this)->space_iterate(&blk);  // cast away const
  return blk.sp != NULL;
}
131
// Checked downcast to DefNewGeneration.  ParNew and ASParNew are
// subclasses of DefNewGeneration, so all three kinds are acceptable.
DefNewGeneration* Generation::as_DefNewGeneration() {
  assert((kind() == Generation::DefNew) ||
         (kind() == Generation::ParNew) ||
         (kind() == Generation::ASParNew),
    "Wrong youngest generation type");
  return (DefNewGeneration*) this;
}
139
140 Generation* Generation::next_gen() const {
141 GenCollectedHeap* gch = GenCollectedHeap::heap();
142 int next = level() + 1;
143 if (next < gch->_n_gens) {
144 return gch->_gens[next];
145 } else {
146 return NULL;
147 }
148 }
149
150 size_t Generation::max_contiguous_available() const {
151 // The largest number of contiguous free words in this or any higher generation.
152 size_t max = 0;
153 for (const Generation* gen = this; gen != NULL; gen = gen->next_gen()) {
154 size_t avail = gen->contiguous_available();
155 if (avail > max) {
156 max = avail;
157 }
158 }
159 return max;
160 }
161
162 bool Generation::promotion_attempt_is_safe(size_t promotion_in_bytes,
163 bool not_used) const {
164 if (PrintGC && Verbose) {
165 gclog_or_tty->print_cr("Generation::promotion_attempt_is_safe"
166 " contiguous_available: " SIZE_FORMAT
167 " promotion_in_bytes: " SIZE_FORMAT,
168 max_contiguous_available(), promotion_in_bytes);
169 }
170 return max_contiguous_available() >= promotion_in_bytes;
171 }
172
// Ignores "ref" and calls allocate().
// Default promotion path: allocate space in this generation and copy
// the object in.  On allocation failure, delegate to the heap's
// failed-promotion handler.  Returns the new oop, or the handler's
// result (which may be NULL) on failure.
oop Generation::promote(oop obj, size_t obj_size, oop* ref) {
  assert(obj_size == (size_t)obj->size(), "bad obj_size passed in");

#ifndef PRODUCT
  // Test hook: deliberately fail promotions to exercise failure paths.
  if (Universe::heap()->promotion_should_fail()) {
    return NULL;
  }
#endif  // #ifndef PRODUCT

  HeapWord* result = allocate(obj_size, false);
  if (result != NULL) {
    // Source and destination cannot overlap (different generations).
    Copy::aligned_disjoint_words((HeapWord*)obj, result, obj_size);
    return oop(result);
  } else {
    GenCollectedHeap* gch = GenCollectedHeap::heap();
    return gch->handle_failed_promotion(this, obj, obj_size, ref);
  }
}
192
// Parallel promotion is not supported by the base class; generations
// that support parallel scavenging must override.
oop Generation::par_promote(int thread_num,
                            oop obj, markOop m, size_t word_sz) {
  // Could do a bad general impl here that gets a lock.  But no.
  ShouldNotCallThis();
  return NULL;
}

// Undo of a parallel promotion allocation is likewise unsupported here.
void Generation::par_promote_alloc_undo(int thread_num,
                                        HeapWord* obj, size_t word_sz) {
  // Could do a bad general impl here that gets a lock.  But no.
  guarantee(false, "No good general implementation.");
}
205
// Return the space whose reserved region contains "p", or NULL if no
// space in this generation covers it.
Space* Generation::space_containing(const void* p) const {
  GenerationIsInReservedClosure blk(p);
  // Cast away const
  ((Generation*)this)->space_iterate(&blk);
  return blk.sp;
}
212
// Some of these are mediocre general implementations.  Should be
// overridden to get better performance.

// Finds the start of the block containing _p by delegating to the
// first space whose reserved region covers _p.
class GenerationBlockStartClosure : public SpaceClosure {
 public:
  const void* _p;
  HeapWord* _start;
  virtual void do_space(Space* s) {
    if (_start == NULL && s->is_in_reserved(_p)) {
      _start = s->block_start(_p);
    }
  }
  GenerationBlockStartClosure(const void* p) { _p = p; _start = NULL; }
};

// Generic block_start: linear search over this generation's spaces.
HeapWord* Generation::block_start(const void* p) const {
  GenerationBlockStartClosure blk(p);
  // Cast away const
  ((Generation*)this)->space_iterate(&blk);
  return blk._start;
}
234
// Finds the size of the block containing _p by delegating to the first
// space whose reserved region covers _p (size == 0 means "not found yet").
class GenerationBlockSizeClosure : public SpaceClosure {
 public:
  const HeapWord* _p;
  size_t size;
  virtual void do_space(Space* s) {
    if (size == 0 && s->is_in_reserved(_p)) {
      size = s->block_size(_p);
    }
  }
  GenerationBlockSizeClosure(const HeapWord* p) { _p = p; size = 0; }
};

// Generic block_size: linear search over this generation's spaces.
// Asserts that "p" was actually found in some space.
size_t Generation::block_size(const HeapWord* p) const {
  GenerationBlockSizeClosure blk(p);
  // Cast away const
  ((Generation*)this)->space_iterate(&blk);
  assert(blk.size > 0, "seems reasonable");
  return blk.size;
}
254
255 class GenerationBlockIsObjClosure : public SpaceClosure {
256 public:
257 const HeapWord* _p;
258 bool is_obj;
259 virtual void do_space(Space* s) {
260 if (!is_obj && s->is_in_reserved(_p)) {
261 is_obj |= s->block_is_obj(_p);
262 }
263 }
264 GenerationBlockIsObjClosure(const HeapWord* p) { _p = p; is_obj = false; }
265 };
266
267 bool Generation::block_is_obj(const HeapWord* p) const {
268 GenerationBlockIsObjClosure blk(p);
269 // Cast away const
270 ((Generation*)this)->space_iterate(&blk);
271 return blk.is_obj;
272 }
273
// Applies an OopClosure to each space, restricted to MemRegion "mr".
class GenerationOopIterateClosure : public SpaceClosure {
 public:
  OopClosure* cl;
  MemRegion mr;
  virtual void do_space(Space* s) {
    s->oop_iterate(mr, cl);
  }
  GenerationOopIterateClosure(OopClosure* _cl, MemRegion _mr) :
    cl(_cl), mr(_mr) {}
};

// Iterate oops in all spaces of this generation (whole reserved region).
void Generation::oop_iterate(OopClosure* cl) {
  GenerationOopIterateClosure blk(cl, _reserved);
  space_iterate(&blk);
}

// Iterate oops in all spaces, restricted to the region "mr".
void Generation::oop_iterate(MemRegion mr, OopClosure* cl) {
  GenerationOopIterateClosure blk(cl, mr);
  space_iterate(&blk);
}
294
// Delegate to the shared remembered set to visit references in "sp"
// that may point into younger generations.
void Generation::younger_refs_in_space_iterate(Space* sp,
                                               OopsInGenClosure* cl) {
  GenRemSet* rs = SharedHeap::heap()->rem_set();
  rs->younger_refs_in_space_iterate(sp, cl);
}
300
// Applies an ObjectClosure to every object in each space.
class GenerationObjIterateClosure : public SpaceClosure {
 private:
  ObjectClosure* _cl;
 public:
  virtual void do_space(Space* s) {
    s->object_iterate(_cl);
  }
  GenerationObjIterateClosure(ObjectClosure* cl) : _cl(cl) {}
};

// Iterate all objects in all spaces of this generation.
void Generation::object_iterate(ObjectClosure* cl) {
  GenerationObjIterateClosure blk(cl);
  space_iterate(&blk);
}
315
316 void Generation::prepare_for_compaction(CompactPoint* cp) {
317 // Generic implementation, can be specialized
318 CompactibleSpace* space = first_compaction_space();
319 while (space != NULL) {
320 space->prepare_for_compaction(cp);
321 space = space->next_compaction_space();
322 }
323 }
324
// Asks each space to adjust its interior pointers after objects have
// been assigned their new (compacted) locations.
class AdjustPointersClosure: public SpaceClosure {
 public:
  void do_space(Space* sp) {
    sp->adjust_pointers();
  }
};

void Generation::adjust_pointers() {
  // Note that this is done over all spaces, not just the compactible
  // ones.
  AdjustPointersClosure blk;
  space_iterate(&blk, true);
}
338
339 void Generation::compact() {
340 CompactibleSpace* sp = first_compaction_space();
341 while (sp != NULL) {
342 sp->compact();
343 sp = sp->next_compaction_space();
344 }
345 }
346
// A generation covered by a card-marking remembered set.  Builds the
// shared block-offset array (BOT) for the whole reserved region and
// tells the remembered set to cover the initially committed region.
CardGeneration::CardGeneration(ReservedSpace rs, size_t initial_byte_size,
                               int level,
                               GenRemSet* remset) :
  Generation(rs, initial_byte_size, level), _rs(remset)
{
  HeapWord* start = (HeapWord*)rs.base();
  size_t reserved_byte_size = rs.size();
  // Word alignment is required by the BOT and card table.
  assert((uintptr_t(start) & 3) == 0, "bad alignment");
  assert((reserved_byte_size & 3) == 0, "bad alignment");
  MemRegion reserved_mr(start, heap_word_size(reserved_byte_size));
  _bts = new BlockOffsetSharedArray(reserved_mr,
                                    heap_word_size(initial_byte_size));
  MemRegion committed_mr(start, heap_word_size(initial_byte_size));
  _rs->resize_covered_region(committed_mr);
  // NOTE(review): the NULL check follows resize_covered_region; harmless
  // since _bts is not used before the check, but the ordering is unusual.
  if (_bts == NULL)
    vm_exit_during_initialization("Could not allocate a BlockOffsetArray");

  // Verify that the start and end of this generation is the start of a card.
  // If this wasn't true, a single card could span more than on generation,
  // which would cause problems when we commit/uncommit memory, and when we
  // clear and dirty cards.
  guarantee(_rs->is_aligned(reserved_mr.start()), "generation must be card aligned");
  if (reserved_mr.end() != Universe::heap()->reserved_region().end()) {
    // Don't check at the very end of the heap as we'll assert that we're probing off
    // the end if we try.
    guarantee(_rs->is_aligned(reserved_mr.end()), "generation must be card aligned");
  }
}
375
376
// No young generation references, clear this generation's cards.
void CardGeneration::clear_remembered_set() {
  _rs->clear(reserved());
}


// Objects in this generation may have moved, invalidate this
// generation's cards.
void CardGeneration::invalidate_remembered_set() {
  _rs->invalidate(used_region());
}


// Currently nothing to do.
void CardGeneration::prepare_for_verify() {}
392
393
// Collect this generation with a full mark-sweep at a safepoint.
// "full", "size" and "is_tlab" are part of the generic interface and
// are not consulted here.
void OneContigSpaceCardGeneration::collect(bool full,
                                           bool clear_all_soft_refs,
                                           size_t size,
                                           bool is_tlab) {
  SpecializationStats::clear();
  // Temporarily expand the span of our ref processor, so
  // refs discovery is over the entire heap, not just this generation
  // (restored automatically when "x" goes out of scope).
  ReferenceProcessorSpanMutator
    x(ref_processor(), GenCollectedHeap::heap()->reserved_region());
  GenMarkSweep::invoke_at_safepoint(_level, ref_processor(), clear_all_soft_refs);
  SpecializationStats::print();
}
406
// Expand the generation and then allocate "word_size" words.  In the
// parallel case the expand/allocate cycle is retried under
// ParGCRareEvent_lock until either the allocation succeeds or there is
// no uncommitted space left to expand into.  Returns NULL on failure.
HeapWord*
OneContigSpaceCardGeneration::expand_and_allocate(size_t word_size,
                                                  bool is_tlab,
                                                  bool parallel) {
  assert(!is_tlab, "OneContigSpaceCardGeneration does not support TLAB allocation");
  if (parallel) {
    MutexLocker x(ParGCRareEvent_lock);
    HeapWord* result = NULL;
    size_t byte_size = word_size * HeapWordSize;
    while (true) {
      expand(byte_size, _min_heap_delta_bytes);
      if (GCExpandToAllocateDelayMillis > 0) {
        // Test/diagnostic delay between expansion and allocation.
        os::sleep(Thread::current(), GCExpandToAllocateDelayMillis, false);
      }
      result = _the_space->par_allocate(word_size);
      if ( result != NULL) {
        return result;
      } else {
        // If there's not enough expansion space available, give up.
        if (_virtual_space.uncommitted_size() < byte_size) {
          return NULL;
        }
        // else try again
      }
    }
  } else {
    // Single-threaded: one expand attempt, then a plain allocation.
    expand(word_size*HeapWordSize, _min_heap_delta_bytes);
    return _the_space->allocate(word_size);
  }
}
437
// Grow the generation by at least "bytes": first try the (larger)
// "expand_bytes" hint, then the exact request, then everything that is
// left in the reservation.  All sizes are page-aligned up.
void OneContigSpaceCardGeneration::expand(size_t bytes, size_t expand_bytes) {
  GCMutexLocker x(ExpandHeap_lock);
  size_t aligned_bytes  = ReservedSpace::page_align_size_up(bytes);
  size_t aligned_expand_bytes = ReservedSpace::page_align_size_up(expand_bytes);
  bool success = false;
  if (aligned_expand_bytes > aligned_bytes) {
    success = grow_by(aligned_expand_bytes);
  }
  if (!success) {
    success = grow_by(aligned_bytes);
  }
  if (!success) {
    // Last resort: commit whatever remains of the reservation.
    grow_to_reserved();
  }
  if (GC_locker::is_active()) {
    if (PrintGC && Verbose) {
      gclog_or_tty->print_cr("Garbage collection disabled, expanded heap instead");
    }
  }
}
458
459
// Shrink the generation by "bytes", rounded down to a page boundary.
// A request smaller than one page is a no-op.
void OneContigSpaceCardGeneration::shrink(size_t bytes) {
  assert_locked_or_safepoint(ExpandHeap_lock);
  size_t size = ReservedSpace::page_align_size_down(bytes);
  if (size > 0) {
    shrink_by(size);
  }
}
467
468
// Simple delegating accessors: a one-contiguous-space generation's
// statistics are those of its single space (plus, for
// contiguous_available, the still-uncommitted part of the reservation).

size_t OneContigSpaceCardGeneration::capacity() const {
  return _the_space->capacity();
}


size_t OneContigSpaceCardGeneration::used() const {
  return _the_space->used();
}


size_t OneContigSpaceCardGeneration::free() const {
  return _the_space->free();
}

MemRegion OneContigSpaceCardGeneration::used_region() const {
  return the_space()->used_region();
}

// Free space available without triggering a GC or expansion.
size_t OneContigSpaceCardGeneration::unsafe_max_alloc_nogc() const {
  return _the_space->free();
}

// Free space in the space plus what could still be committed.
size_t OneContigSpaceCardGeneration::contiguous_available() const {
  return _the_space->free() + _virtual_space.uncommitted_size();
}
494
// Commit "bytes" more of the reservation and resize the supporting
// data structures (card table, block offset array, space end) to match.
// Order matters: the card table and BOT must cover the new region
// before the space's end is advanced.  Returns false if the commit fails.
bool OneContigSpaceCardGeneration::grow_by(size_t bytes) {
  assert_locked_or_safepoint(ExpandHeap_lock);
  bool result = _virtual_space.expand_by(bytes);
  if (result) {
    size_t new_word_size =
       heap_word_size(_virtual_space.committed_size());
    MemRegion mr(_the_space->bottom(), new_word_size);
    // Expand card table
    Universe::heap()->barrier_set()->resize_covered_region(mr);
    // Expand shared block offset array
    _bts->resize(new_word_size);

    // Fix for bug #4668531
    // Mangle the newly committed region before making it visible.
    MemRegion mangle_region(_the_space->end(), (HeapWord*)_virtual_space.high());
    _the_space->mangle_region(mangle_region);

    // Expand space -- also expands space's BOT
    // (which uses (part of) shared array above)
    _the_space->set_end((HeapWord*)_virtual_space.high());

    // update the space and generation capacity counters
    update_counters();

    if (Verbose && PrintGC) {
      size_t new_mem_size = _virtual_space.committed_size();
      size_t old_mem_size = new_mem_size - bytes;
      gclog_or_tty->print_cr("Expanding %s from " SIZE_FORMAT "K by "
                      SIZE_FORMAT "K to " SIZE_FORMAT "K",
                      name(), old_mem_size/K, bytes/K, new_mem_size/K);
    }
  }
  return result;
}
528
529
// Commit all remaining uncommitted space in the reservation.  Returns
// true if nothing remained or the grow succeeded.
bool OneContigSpaceCardGeneration::grow_to_reserved() {
  assert_locked_or_safepoint(ExpandHeap_lock);
  bool success = true;
  const size_t remaining_bytes = _virtual_space.uncommitted_size();
  if (remaining_bytes > 0) {
    success = grow_by(remaining_bytes);
    DEBUG_ONLY(if (!success) warning("grow to reserved failed");)
  }
  return success;
}
540
// Uncommit "bytes" and shrink the supporting structures to match.
// The inverse of grow_by: the space end is pulled back first, then the
// BOT and card table are trimmed to the new committed size.
void OneContigSpaceCardGeneration::shrink_by(size_t bytes) {
  assert_locked_or_safepoint(ExpandHeap_lock);
  // Shrink committed space
  _virtual_space.shrink_by(bytes);
  // Shrink space; this also shrinks the space's BOT
  _the_space->set_end((HeapWord*) _virtual_space.high());
  size_t new_word_size = heap_word_size(_the_space->capacity());
  // Shrink the shared block offset array
  _bts->resize(new_word_size);
  MemRegion mr(_the_space->bottom(), new_word_size);
  // Shrink the card table
  Universe::heap()->barrier_set()->resize_covered_region(mr);

  if (Verbose && PrintGC) {
    size_t new_mem_size = _virtual_space.committed_size();
    size_t old_mem_size = new_mem_size + bytes;
    gclog_or_tty->print_cr("Shrinking %s from " SIZE_FORMAT "K to " SIZE_FORMAT "K",
                  name(), old_mem_size/K, new_mem_size/K);
  }
}
561
// Currently nothing to do.
void OneContigSpaceCardGeneration::prepare_for_verify() {}


// Iterate all objects in the single contiguous space.
void OneContigSpaceCardGeneration::object_iterate(ObjectClosure* blk) {
  _the_space->object_iterate(blk);
}

// There is only one space; "usedOnly" is irrelevant here.
void OneContigSpaceCardGeneration::space_iterate(SpaceClosure* blk,
                                                 bool usedOnly) {
  blk->do_space(_the_space);
}

// Iterate objects allocated since the last GC, starting from the
// _last_gc watermark recorded in gc_epilogue().
void OneContigSpaceCardGeneration::object_iterate_since_last_GC(ObjectClosure* blk) {
  // Deal with delayed initialization of _the_space,
  // and lack of initialization of _last_gc.
  if (_last_gc.space() == NULL) {
    assert(the_space() != NULL, "shouldn't be NULL");
    _last_gc = the_space()->bottom_mark();
  }
  the_space()->object_iterate_from(_last_gc, blk);
}
584
// Scan this generation's dirty cards for references into younger
// generations.  The closure's generation is set for the duration of
// the scan and reset afterwards.
void OneContigSpaceCardGeneration::younger_refs_iterate(OopsInGenClosure* blk) {
  blk->set_generation(this);
  younger_refs_in_space_iterate(_the_space, blk);
  blk->reset_generation();
}

// Record the current allocation point as the saved mark.
void OneContigSpaceCardGeneration::save_marks() {
  _the_space->set_saved_mark();
}


void OneContigSpaceCardGeneration::reset_saved_marks() {
  _the_space->reset_saved_mark();
}

// True if no allocation has occurred since save_marks() was last called.
bool OneContigSpaceCardGeneration::no_allocs_since_save_marks() {
  return _the_space->saved_mark_at_top();
}
604
// Generates one oop_since_save_marks_iterate<nv_suffix> definition per
// closure type: set the closure's generation, iterate oops allocated
// since the last saved mark, reset, and advance the saved mark so the
// next call picks up where this one left off.
#define OneContig_SINCE_SAVE_MARKS_ITERATE_DEFN(OopClosureType, nv_suffix)      \
                                                                                \
void OneContigSpaceCardGeneration::                                             \
oop_since_save_marks_iterate##nv_suffix(OopClosureType* blk) {                  \
  blk->set_generation(this);                                                    \
  _the_space->oop_since_save_marks_iterate##nv_suffix(blk);                     \
  blk->reset_generation();                                                      \
  save_marks();                                                                 \
}

// Instantiate the definition for every since-save-marks closure type.
ALL_SINCE_SAVE_MARKS_CLOSURES(OneContig_SINCE_SAVE_MARKS_ITERATE_DEFN)

#undef OneContig_SINCE_SAVE_MARKS_ITERATE_DEFN
618
619
// Record the post-GC allocation top as the new _last_gc watermark
// (used by object_iterate_since_last_GC) and refresh counters.
void OneContigSpaceCardGeneration::gc_epilogue(bool full) {
  _last_gc = WaterMark(the_space(), the_space()->top());

  // update the generation and space performance counters
  update_counters();
}

// Verification delegates to the single space.
void OneContigSpaceCardGeneration::verify(bool allow_dirty) {
  the_space()->verify(allow_dirty);
}

// Print the generation header followed by its space.
void OneContigSpaceCardGeneration::print_on(outputStream* st)  const {
  Generation::print_on(st);
  st->print("   the");
  the_space()->print_on(st);
}