comparison src/share/vm/memory/space.cpp @ 342:37f87013dfd8

6711316: Open source the Garbage-First garbage collector
Summary: First mercurial integration of the code for the Garbage-First garbage collector.
Reviewed-by: apetrusenko, iveresov, jmasa, sgoldman, tonyp, ysr
author ysr
date Thu, 05 Jun 2008 15:57:56 -0700
parents ba764ed4b6f2
children 6aae2f9d0294
comparing 189:0b27f3512f9e with 342:37f87013dfd8

@@ -103,11 +103,11 @@
   assert(_precision == CardTableModRefBS::ObjHeadPreciseArray ||
          _precision == CardTableModRefBS::Precise,
          "Only ones we deal with for now.");
 
   assert(_precision != CardTableModRefBS::ObjHeadPreciseArray ||
-         _last_bottom == NULL ||
+         _cl->idempotent() || _last_bottom == NULL ||
          top <= _last_bottom,
          "Not decreasing");
   NOT_PRODUCT(_last_bottom = mr.start());
 
   bottom_obj = _sp->block_start(bottom);
@@ -142,11 +142,18 @@
   // Walk the region if it is not empty; otherwise there is nothing to do.
   if (!mr.is_empty()) {
     walk_mem_region(mr, bottom_obj, top);
   }
 
-  _min_done = bottom;
+  // An idempotent closure might be applied in any order, so we don't
+  // record a _min_done for it.
+  if (!_cl->idempotent()) {
+    _min_done = bottom;
+  } else {
+    assert(_min_done == _last_explicit_min_done,
+           "Don't update _min_done for idempotent cl");
+  }
 }
 
 DirtyCardToOopClosure* Space::new_dcto_cl(OopClosure* cl,
                                           CardTableModRefBS::PrecisionStyle precision,
                                           HeapWord* boundary) {
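
Aside (illustration, not part of the changeset): the two hunks above let a closure opt out of the "not decreasing" invariant and the _min_done bookkeeping by reporting idempotent(). A minimal self-contained sketch of why that is sound, using stand-in types rather than the real HotSpot closures (all names below are hypothetical):

    #include <cassert>
    #include <cstddef>
    #include <set>

    // Stand-ins only; not the real OopClosure / DirtyCardToOopClosure interfaces.
    struct Closure {
      virtual void apply(void** slot) = 0;
      virtual bool idempotent() const { return false; }
      virtual ~Closure() {}
    };

    // Inserting into a set ignores duplicates, so re-applying this is harmless.
    struct CollectSlotsClosure : Closure {
      std::set<void**> seen;
      virtual void apply(void** slot) { seen.insert(slot); }
      virtual bool idempotent() const { return true; }
    };

    struct CardRangeWalker {
      void** _min_done;   // low-water mark of what has already been processed
      Closure* _cl;
      explicit CardRangeWalker(Closure* cl) : _min_done(NULL), _cl(cl) {}

      void do_range(void** bottom, void** top) {
        // A non-idempotent closure must see each slot exactly once, so ranges
        // must arrive in decreasing address order (the "Not decreasing" assert).
        if (!_cl->idempotent()) {
          assert(_min_done == NULL || top <= _min_done);
        }
        for (void** p = bottom; p < top; ++p) {
          _cl->apply(p);
        }
        // An idempotent closure may be applied in any order, possibly more than
        // once, so recording a low-water mark for it would be meaningless.
        if (!_cl->idempotent()) {
          _min_done = bottom;
        }
      }
    };

    int main() {
      void* slots[8] = { NULL };
      CollectSlotsClosure cl;
      CardRangeWalker walker(&cl);
      walker.do_range(&slots[4], &slots[8]);  // any order is fine...
      walker.do_range(&slots[0], &slots[6]);  // ...including overlapping ranges
      assert(cl.seen.size() == 8);
      return 0;
    }
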
@@ -230,51 +237,79 @@
                                           CardTableModRefBS::PrecisionStyle precision,
                                           HeapWord* boundary) {
   return new ContiguousSpaceDCTOC(this, cl, precision, boundary);
 }
 
-void Space::initialize(MemRegion mr, bool clear_space) {
+void Space::set_bounds(MemRegion mr) {
   HeapWord* bottom = mr.start();
   HeapWord* end    = mr.end();
   assert(Universe::on_page_boundary(bottom) && Universe::on_page_boundary(end),
          "invalid space boundaries");
   set_bottom(bottom);
   set_end(end);
+}
+
+void Space::initialize(MemRegion mr, bool clear_space) {
+  set_bounds(mr);
   if (clear_space) clear();
 }
 
 void Space::clear() {
   if (ZapUnusedHeapArea) mangle_unused_area();
 }
 
-void ContiguousSpace::initialize(MemRegion mr, bool clear_space)
-{
-  CompactibleSpace::initialize(mr, clear_space);
-  _concurrent_iteration_safe_limit = top();
+void CompactibleSpace::initialize(MemRegion mr, bool clear_space) {
+  Space::initialize(mr, false); // We'll do the clearing if there's
+                                // clearing to be done.
+  _compaction_top = bottom();
+  _next_compaction_space = NULL;
+  if (clear_space) clear();
+}
+
+void CompactibleSpace::clear() {
+  _compaction_top = bottom();
+  Space::clear();
+}
+
+void ContiguousSpace::initialize(MemRegion mr, bool clear_space) {
+  CompactibleSpace::initialize(mr, false); // We'll do the clearing if there's
+                                           // clearing to be done.
+  set_top(bottom());
+  set_saved_mark();
+  if (clear_space) clear();
 }
 
 void ContiguousSpace::clear() {
   set_top(bottom());
   set_saved_mark();
-  Space::clear();
+  CompactibleSpace::clear();
 }
 
 bool Space::is_in(const void* p) const {
-  HeapWord* b = block_start(p);
+  HeapWord* b = block_start_const(p);
   return b != NULL && block_is_obj(b);
 }
 
 bool ContiguousSpace::is_in(const void* p) const {
   return _bottom <= p && p < _top;
 }
 
 bool ContiguousSpace::is_free_block(const HeapWord* p) const {
   return p >= _top;
 }
 
+void OffsetTableContigSpace::initialize(MemRegion mr, bool clear_space) {
+  // false ==> we'll do the clearing if there's clearing to be done.
+  ContiguousSpace::initialize(mr, false);
+  _offsets.zero_bottom_entry();
+  _offsets.initialize_threshold();
+  if (clear_space) clear();
+}
+
 void OffsetTableContigSpace::clear() {
   ContiguousSpace::clear();
+  _offsets.zero_bottom_entry();
   _offsets.initialize_threshold();
 }
 
 void OffsetTableContigSpace::set_bottom(HeapWord* new_bottom) {
   Space::set_bottom(new_bottom);
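
Aside (illustration, not part of the changeset): the hunk above settles on a layering where each initialize() passes clear_space = false up to its superclass, sets up its own fields, and performs the virtual clear() only at the end, so clear() never observes a half-initialized space. A small self-contained analogue of that pattern (hypothetical names, not the HotSpot classes):

    #include <cstdio>

    class Space {
    public:
      virtual ~Space() {}
      void initialize(bool clear_space) {
        std::puts("Space: set bounds");
        if (clear_space) clear();        // only reached when called directly
      }
      virtual void clear() { std::puts("Space::clear"); }
    };

    class CompactibleSpace : public Space {
    public:
      void initialize(bool clear_space) {
        Space::initialize(false);        // "we'll do the clearing if there's
                                         //  clearing to be done"
        std::puts("CompactibleSpace: set compaction fields");
        if (clear_space) clear();
      }
      virtual void clear() {
        std::puts("CompactibleSpace::clear");
        Space::clear();
      }
    };

    int main() {
      CompactibleSpace cs;
      cs.initialize(true);   // clear() runs once, after all fields are set up
      return 0;
    }
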
@@ -293,16 +328,10 @@
   mangle_region(MemRegion(top(), end()));
 }
 
 void ContiguousSpace::mangle_region(MemRegion mr) {
   debug_only(Copy::fill_to_words(mr.start(), mr.word_size(), badHeapWord));
 }
 
-void CompactibleSpace::initialize(MemRegion mr, bool clear_space) {
-  Space::initialize(mr, clear_space);
-  _compaction_top = bottom();
-  _next_compaction_space = NULL;
-}
-
 HeapWord* CompactibleSpace::forward(oop q, size_t size,
                                     CompactPoint* cp, HeapWord* compact_top) {
   // q is alive
@@ -475,12 +504,12 @@
     prev_p = p;
     p += oop(p)->size();
   }
   guarantee(p == top(), "end of last object must match end of space");
   if (top() != end()) {
-    guarantee(top() == block_start(end()-1) &&
-              top() == block_start(top()),
+    guarantee(top() == block_start_const(end()-1) &&
+              top() == block_start_const(top()),
               "top should be start of unallocated block, if it exists");
   }
 }
 
 void Space::oop_iterate(OopClosure* blk) {
@@ -708,11 +737,11 @@
 ALL_SINCE_SAVE_MARKS_CLOSURES(ContigSpace_OOP_SINCE_SAVE_MARKS_DEFN)
 
 #undef ContigSpace_OOP_SINCE_SAVE_MARKS_DEFN
 
 // Very general, slow implementation.
-HeapWord* ContiguousSpace::block_start(const void* p) const {
+HeapWord* ContiguousSpace::block_start_const(const void* p) const {
   assert(MemRegion(bottom(), end()).contains(p), "p not in space");
   if (p >= top()) {
     return top();
   } else {
     HeapWord* last = bottom();
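
Aside (illustration, not part of the changeset): this hunk, like the earlier ones, renames the read-only lookup to block_start_const, presumably so a separate non-const block_start can remain free to update lookup structures as a side effect. A generic self-contained sketch of that const/non-const split (hypothetical names):

    #include <cstddef>

    class BlockIndex {
    public:
      BlockIndex() : _last_query(NULL), _last_answer(NULL) {}

      // Const query: usable from const members (is_in() const, verification).
      const char* block_start_const(const void* p) const {
        // A real implementation would search an offset table; stand-in only.
        return static_cast<const char*>(p);
      }

      // Non-const variant: may memoize/refine the lookup as a side effect.
      const char* block_start(const void* p) {
        if (p != _last_query) {
          _last_query  = p;
          _last_answer = block_start_const(p);
        }
        return _last_answer;
      }

    private:
      const void* _last_query;
      const char* _last_answer;
    };
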
@@ -911,11 +940,12 @@
   while (p < top()) {
     size_t size = oop(p)->size();
     // For a sampling of objects in the space, find it using the
     // block offset table.
     if (blocks == BLOCK_SAMPLE_INTERVAL) {
-      guarantee(p == block_start(p + (size/2)), "check offset computation");
+      guarantee(p == block_start_const(p + (size/2)),
+                "check offset computation");
       blocks = 0;
     } else {
       blocks++;
     }
 