comparison src/share/vm/services/memSnapshot.hpp @ 6979:fb3190e77d3c

8001592: NMT: assertion failed: assert(_amount >= amt) failed: Just check: memBaseline.hpp:180 Summary: Fixed NMT that miscounted arena memory when it is used as a value or stack object. Reviewed-by: acorn, coleenp
author zgu
date Fri, 09 Nov 2012 19:24:31 -0500
parents 716c64bda5ba
children 49cbd3e25ba9
comparison
equal deleted inserted replaced
6938:8940ddc1036f 6979:fb3190e77d3c
28 #include "memory/allocation.hpp" 28 #include "memory/allocation.hpp"
29 #include "runtime/mutex.hpp" 29 #include "runtime/mutex.hpp"
30 #include "runtime/mutexLocker.hpp" 30 #include "runtime/mutexLocker.hpp"
31 #include "services/memBaseline.hpp" 31 #include "services/memBaseline.hpp"
32 #include "services/memPtrArray.hpp" 32 #include "services/memPtrArray.hpp"
33
34 33
35 // Snapshot pointer array iterator 34 // Snapshot pointer array iterator
36 35
37 // The pointer array contains malloc-ed pointers 36 // The pointer array contains malloc-ed pointers
38 class MemPointerIterator : public MemPointerArrayIteratorImpl { 37 class MemPointerIterator : public MemPointerArrayIteratorImpl {
163 } 162 }
164 #endif 163 #endif
165 }; 164 };
166 165
167 class MallocRecordIterator : public MemPointerArrayIterator { 166 class MallocRecordIterator : public MemPointerArrayIterator {
168 protected: 167 private:
169 MemPointerArrayIteratorImpl _itr; 168 MemPointerArrayIteratorImpl _itr;
170 169
170
171
171 public: 172 public:
172 MallocRecordIterator(MemPointerArray* arr) : _itr(arr) { 173 MallocRecordIterator(MemPointerArray* arr) : _itr(arr) {
173 } 174 }
174 175
175 virtual MemPointer* current() const { 176 virtual MemPointer* current() const {
176 MemPointerRecord* cur = (MemPointerRecord*)_itr.current(); 177 #ifdef ASSERT
177 assert(cur == NULL || !cur->is_vm_pointer(), "seek error"); 178 MemPointer* cur_rec = _itr.current();
178 MemPointerRecord* next = (MemPointerRecord*)_itr.peek_next(); 179 if (cur_rec != NULL) {
179 if (next == NULL || next->addr() != cur->addr()) { 180 MemPointer* prev_rec = _itr.peek_prev();
180 return cur; 181 MemPointer* next_rec = _itr.peek_next();
181 } else { 182 assert(prev_rec == NULL || prev_rec->addr() < cur_rec->addr(), "Sorting order");
182 assert(!cur->is_vm_pointer(), "Sanity check"); 183 assert(next_rec == NULL || next_rec->addr() > cur_rec->addr(), "Sorting order");
183 assert(cur->is_allocation_record() && next->is_deallocation_record(), 184 }
184 "sorting order"); 185 #endif
185 assert(cur->seq() != next->seq(), "Sanity check"); 186 return _itr.current();
186 return cur->seq() > next->seq() ? cur : next; 187 }
187 }
188 }
189
190 virtual MemPointer* next() { 188 virtual MemPointer* next() {
191 MemPointerRecord* cur = (MemPointerRecord*)_itr.current(); 189 MemPointerRecord* next_rec = (MemPointerRecord*)_itr.next();
192 assert(cur == NULL || !cur->is_vm_pointer(), "Sanity check"); 190 // arena memory record is a special case, which we have to compare
193 MemPointerRecord* next = (MemPointerRecord*)_itr.next(); 191 // sequence number against its associated arena record.
194 if (next == NULL) { 192 if (next_rec != NULL && next_rec->is_arena_memory_record()) {
195 return NULL; 193 MemPointerRecord* prev_rec = (MemPointerRecord*)_itr.peek_prev();
196 } 194 // if there is an associated arena record, it has to be previous
197 if (cur->addr() == next->addr()) { 195 // record because of sorting order (by address) - NMT generates a pseudo address
198 next = (MemPointerRecord*)_itr.next(); 196 // for arena's size record by offsetting arena's address, that guarantees
199 } 197 // the order of arena record and its size record.
200 return current(); 198 if (prev_rec != NULL && prev_rec->is_arena_record() &&
199 next_rec->is_memory_record_of_arena(prev_rec)) {
200 if (prev_rec->seq() > next_rec->seq()) {
201 // Skip this arena memory record
202 // Two scenarios:
203 // - if the arena record is an allocation record, this early
204 // size record must be left over by a previous arena,
205 // and the last size record should have size = 0.
206 // - if the arena record is a deallocation record, this
207 // size record should be its cleanup record, which should
208 // also have size = 0. In other words, an arena always resets
209 // its size before it is gone (see Arena's destructor)
210 assert(next_rec->size() == 0, "size not reset");
211 return _itr.next();
212 } else {
213 assert(prev_rec->is_allocation_record(),
214 "Arena size record ahead of allocation record");
215 }
216 }
217 }
218 return next_rec;
201 } 219 }
202 220
203 MemPointer* peek_next() const { ShouldNotReachHere(); return NULL; } 221 MemPointer* peek_next() const { ShouldNotReachHere(); return NULL; }
204 MemPointer* peek_prev() const { ShouldNotReachHere(); return NULL; } 222 MemPointer* peek_prev() const { ShouldNotReachHere(); return NULL; }
205 void remove() { ShouldNotReachHere(); } 223 void remove() { ShouldNotReachHere(); }
211 // cheaper than during promotion phase. However, it does have limitation - it 229 // cheaper than during promotion phase. However, it does have limitation - it
212 // can only eliminate duplicated records within the generation, there are 230 // can only eliminate duplicated records within the generation, there are
213 // still chances seeing duplicated records during promotion. 231 // still chances seeing duplicated records during promotion.
214 // We want to use the record with higher sequence number, because it has 232 // We want to use the record with higher sequence number, because it has
215 // more accurate callsite pc. 233 // more accurate callsite pc.
216 class VMRecordIterator : public MallocRecordIterator { 234 class VMRecordIterator : public MemPointerArrayIterator {
217 public: 235 private:
218 VMRecordIterator(MemPointerArray* arr) : MallocRecordIterator(arr) { 236 MemPointerArrayIteratorImpl _itr;
237
238 public:
239 VMRecordIterator(MemPointerArray* arr) : _itr(arr) {
219 MemPointerRecord* cur = (MemPointerRecord*)_itr.current(); 240 MemPointerRecord* cur = (MemPointerRecord*)_itr.current();
220 MemPointerRecord* next = (MemPointerRecord*)_itr.peek_next(); 241 MemPointerRecord* next = (MemPointerRecord*)_itr.peek_next();
221 while (next != NULL) { 242 while (next != NULL) {
222 assert(cur != NULL, "Sanity check"); 243 assert(cur != NULL, "Sanity check");
223 assert(((SeqMemPointerRecord*)next)->seq() > ((SeqMemPointerRecord*)cur)->seq(), 244 assert(((SeqMemPointerRecord*)next)->seq() > ((SeqMemPointerRecord*)cur)->seq(),
254 } 275 }
255 } 276 }
256 return cur; 277 return cur;
257 } 278 }
258 279
280 MemPointer* peek_next() const { ShouldNotReachHere(); return NULL; }
281 MemPointer* peek_prev() const { ShouldNotReachHere(); return NULL; }
282 void remove() { ShouldNotReachHere(); }
283 bool insert(MemPointer* ptr) { ShouldNotReachHere(); return false; }
284 bool insert_after(MemPointer* ptr) { ShouldNotReachHere(); return false; }
285
259 private: 286 private:
260 bool is_duplicated_record(MemPointerRecord* p1, MemPointerRecord* p2) const { 287 bool is_duplicated_record(MemPointerRecord* p1, MemPointerRecord* p2) const {
261 bool ret = (p1->addr() == p2->addr() && p1->size() == p2->size() && p1->flags() == p2->flags()); 288 bool ret = (p1->addr() == p2->addr() && p1->size() == p2->size() && p1->flags() == p2->flags());
262 assert(!(ret && FLAGS_TO_MEMORY_TYPE(p1->flags()) == mtThreadStack), "dup on stack record"); 289 assert(!(ret && FLAGS_TO_MEMORY_TYPE(p1->flags()) == mtThreadStack), "dup on stack record");
263 return ret; 290 return ret;
346 NOT_PRODUCT(bool has_allocation_record(address addr);) 373 NOT_PRODUCT(bool has_allocation_record(address addr);)
347 // dump all virtual memory pointers in snapshot 374 // dump all virtual memory pointers in snapshot
348 DEBUG_ONLY( void dump_all_vm_pointers();) 375 DEBUG_ONLY( void dump_all_vm_pointers();)
349 376
350 private: 377 private:
351 // copy pointer data from src to dest 378 // copy sequenced pointer from src to dest
352 void copy_pointer(MemPointerRecord* dest, const MemPointerRecord* src); 379 void copy_seq_pointer(MemPointerRecord* dest, const MemPointerRecord* src);
380 // assign a sequenced pointer to non-sequenced pointer
381 void assign_pointer(MemPointerRecord*dest, const MemPointerRecord* src);
353 382
354 bool promote_malloc_records(MemPointerArrayIterator* itr); 383 bool promote_malloc_records(MemPointerArrayIterator* itr);
355 bool promote_virtual_memory_records(MemPointerArrayIterator* itr); 384 bool promote_virtual_memory_records(MemPointerArrayIterator* itr);
356 }; 385 };
357 386