comparison src/share/vm/utilities/stack.inline.hpp @ 6197:d2a62e0f25eb

6995781: Native Memory Tracking (Phase 1)
7151532: DCmd for hotspot native memory tracking
Summary: Implementation of native memory tracking phase 1, which tracks VM native memory usage, and related DCmd
Reviewed-by: acorn, coleenp, fparain
author zgu
date Thu, 28 Jun 2012 17:03:16 -0400
parents f95d63e2154a
children b9a9ed0f8eeb
comparison of 6174:74533f63b116 (before) with 6197:d2a62e0f25eb (after);
lines prefixed with '-' belong to the old revision, '+' to the new, and unprefixed lines are unchanged context.
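
The hunks below are the stack.inline.hpp side of the change: StackBase, Stack, ResourceStack and StackIterator take an additional MEMFLAGS template parameter so that the C-heap segments a stack allocates are tagged with a memory type for NMT, and inherited members are now spelled this->... because StackBase<F> has become a dependent base class. A rough sketch of a call site after the change (the includes, the mtInternal flag and the helper function are assumptions for illustration, not taken from this changeset):

// Hypothetical call site; assumes the MEMFLAGS values (e.g. mtInternal)
// introduced elsewhere in this NMT change.
#include "memory/allocation.hpp"         // MEMFLAGS
#include "utilities/stack.inline.hpp"

static size_t drain_example() {
  Stack<size_t, mtInternal> work;        // second argument tags every C-heap segment
  for (size_t i = 0; i < 1000; ++i) {
    work.push(i);                        // push_segment() allocates tagged segments as needed
  }
  size_t sum = 0;
  while (!work.is_empty()) {
    sum += work.pop();                   // pop_segment() caches or frees segments
  }
  return sum;
}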
 #ifndef SHARE_VM_UTILITIES_STACK_INLINE_HPP
 #define SHARE_VM_UTILITIES_STACK_INLINE_HPP

 #include "utilities/stack.hpp"

-StackBase::StackBase(size_t segment_size, size_t max_cache_size,
+template <MEMFLAGS F> StackBase<F>::StackBase(size_t segment_size, size_t max_cache_size,
                      size_t max_size):
   _seg_size(segment_size),
   _max_cache_size(max_cache_size),
   _max_size(adjust_max_size(max_size, segment_size))
 {
   assert(_max_size % _seg_size == 0, "not a multiple");
 }

-size_t StackBase::adjust_max_size(size_t max_size, size_t seg_size)
+template <MEMFLAGS F> size_t StackBase<F>::adjust_max_size(size_t max_size, size_t seg_size)
 {
   assert(seg_size > 0, "cannot be 0");
   assert(max_size >= seg_size || max_size == 0, "max_size too small");
   const size_t limit = max_uintx - (seg_size - 1);
   if (max_size == 0 || max_size > limit) {
     max_size = limit;
   }
   return (max_size + seg_size - 1) / seg_size * seg_size;
 }

-template <class E>
-Stack<E>::Stack(size_t segment_size, size_t max_cache_size, size_t max_size):
-  StackBase(adjust_segment_size(segment_size), max_cache_size, max_size)
+template <class E, MEMFLAGS F>
+Stack<E, F>::Stack(size_t segment_size, size_t max_cache_size, size_t max_size):
+  StackBase<F>(adjust_segment_size(segment_size), max_cache_size, max_size)
 {
   reset(true);
 }

-template <class E>
-void Stack<E>::push(E item)
+template <class E, MEMFLAGS F>
+void Stack<E, F>::push(E item)
 {
   assert(!is_full(), "pushing onto a full stack");
-  if (_cur_seg_size == _seg_size) {
+  if (this->_cur_seg_size == this->_seg_size) {
     push_segment();
   }
-  _cur_seg[_cur_seg_size] = item;
-  ++_cur_seg_size;
+  this->_cur_seg[this->_cur_seg_size] = item;
+  ++this->_cur_seg_size;
 }

-template <class E>
-E Stack<E>::pop()
+template <class E, MEMFLAGS F>
+E Stack<E, F>::pop()
 {
   assert(!is_empty(), "popping from an empty stack");
-  if (_cur_seg_size == 1) {
-    E tmp = _cur_seg[--_cur_seg_size];
+  if (this->_cur_seg_size == 1) {
+    E tmp = _cur_seg[--this->_cur_seg_size];
     pop_segment();
     return tmp;
   }
-  return _cur_seg[--_cur_seg_size];
+  return this->_cur_seg[--this->_cur_seg_size];
 }

-template <class E>
-void Stack<E>::clear(bool clear_cache)
+template <class E, MEMFLAGS F>
+void Stack<E, F>::clear(bool clear_cache)
 {
   free_segments(_cur_seg);
   if (clear_cache) free_segments(_cache);
   reset(clear_cache);
 }

-template <class E>
-size_t Stack<E>::default_segment_size()
+template <class E, MEMFLAGS F>
+size_t Stack<E, F>::default_segment_size()
 {
   // Number of elements that fit in 4K bytes minus the size of two pointers
   // (link field and malloc header).
   return (4096 - 2 * sizeof(E*)) / sizeof(E);
 }

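For example, with 8-byte pointers and an 8-byte element type E, default_segment_size() above works out to (4096 - 2*8) / 8 = 510 elements per segment; the two reserved pointers account for the segment link field and the malloc header mentioned in the comment.
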
-template <class E>
-size_t Stack<E>::adjust_segment_size(size_t seg_size)
+template <class E, MEMFLAGS F>
+size_t Stack<E, F>::adjust_segment_size(size_t seg_size)
 {
   const size_t elem_sz = sizeof(E);
   const size_t ptr_sz = sizeof(E*);
   assert(elem_sz % ptr_sz == 0 || ptr_sz % elem_sz == 0, "bad element size");
   if (elem_sz < ptr_sz) {
     return align_size_up(seg_size * elem_sz, ptr_sz) / elem_sz;
   }
   return seg_size;
 }

-template <class E>
-size_t Stack<E>::link_offset() const
+template <class E, MEMFLAGS F>
+size_t Stack<E, F>::link_offset() const
 {
-  return align_size_up(_seg_size * sizeof(E), sizeof(E*));
+  return align_size_up(this->_seg_size * sizeof(E), sizeof(E*));
 }

-template <class E>
-size_t Stack<E>::segment_bytes() const
+template <class E, MEMFLAGS F>
+size_t Stack<E, F>::segment_bytes() const
 {
   return link_offset() + sizeof(E*);
 }

-template <class E>
-E** Stack<E>::link_addr(E* seg) const
+template <class E, MEMFLAGS F>
+E** Stack<E, F>::link_addr(E* seg) const
 {
   return (E**) ((char*)seg + link_offset());
 }

-template <class E>
-E* Stack<E>::get_link(E* seg) const
+template <class E, MEMFLAGS F>
+E* Stack<E, F>::get_link(E* seg) const
 {
   return *link_addr(seg);
 }

-template <class E>
-E* Stack<E>::set_link(E* new_seg, E* old_seg)
+template <class E, MEMFLAGS F>
+E* Stack<E, F>::set_link(E* new_seg, E* old_seg)
 {
   *link_addr(new_seg) = old_seg;
   return new_seg;
 }

-template <class E>
-E* Stack<E>::alloc(size_t bytes)
+template <class E, MEMFLAGS F>
+E* Stack<E, F>::alloc(size_t bytes)
 {
-  return (E*) NEW_C_HEAP_ARRAY(char, bytes);
+  return (E*) NEW_C_HEAP_ARRAY(char, bytes, F);
 }

-template <class E>
-void Stack<E>::free(E* addr, size_t bytes)
+template <class E, MEMFLAGS F>
+void Stack<E, F>::free(E* addr, size_t bytes)
 {
-  FREE_C_HEAP_ARRAY(char, (char*) addr);
+  FREE_C_HEAP_ARRAY(char, (char*) addr, F);
 }

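The only allocation-path change in this file is right above: Stack<E, F>::alloc() and free() now pass the flag F to NEW_C_HEAP_ARRAY and FREE_C_HEAP_ARRAY, so segment memory is attributed to the stack's category. A standalone sketch of the same idea, with invented names rather than HotSpot code, showing a non-type template parameter carrying a tag into the allocation calls:

#include <cstddef>
#include <cstdio>
#include <cstdlib>

// Invented stand-in for the MEMFLAGS categories.
enum MemTag { TagGC, TagInternal };

// A non-type template parameter fixes the tag at compile time, so every
// allocation made through this helper is attributed to that category.
template <MemTag T>
struct TaggedAlloc {
  static void* alloc(std::size_t bytes) {
    std::printf("alloc %zu bytes, tag %d\n", bytes, static_cast<int>(T)); // stand-in for NMT bookkeeping
    return std::malloc(bytes);
  }
  static void free(void* p) { std::free(p); }
};

int main() {
  void* seg = TaggedAlloc<TagGC>::alloc(4096);   // analogous to Stack<E, F>::alloc(segment_bytes())
  TaggedAlloc<TagGC>::free(seg);
  return 0;
}
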
-template <class E>
-void Stack<E>::push_segment()
+template <class E, MEMFLAGS F>
+void Stack<E, F>::push_segment()
 {
-  assert(_cur_seg_size == _seg_size, "current segment is not full");
+  assert(this->_cur_seg_size == this->_seg_size, "current segment is not full");
   E* next;
-  if (_cache_size > 0) {
+  if (this->_cache_size > 0) {
     // Use a cached segment.
     next = _cache;
     _cache = get_link(_cache);
-    --_cache_size;
+    --this->_cache_size;
   } else {
     next = alloc(segment_bytes());
     DEBUG_ONLY(zap_segment(next, true);)
   }
   const bool at_empty_transition = is_empty();
-  _cur_seg = set_link(next, _cur_seg);
-  _cur_seg_size = 0;
-  _full_seg_size += at_empty_transition ? 0 : _seg_size;
+  this->_cur_seg = set_link(next, _cur_seg);
+  this->_cur_seg_size = 0;
+  this->_full_seg_size += at_empty_transition ? 0 : this->_seg_size;
   DEBUG_ONLY(verify(at_empty_transition);)
 }

-template <class E>
-void Stack<E>::pop_segment()
+template <class E, MEMFLAGS F>
+void Stack<E, F>::pop_segment()
 {
-  assert(_cur_seg_size == 0, "current segment is not empty");
+  assert(this->_cur_seg_size == 0, "current segment is not empty");
   E* const prev = get_link(_cur_seg);
-  if (_cache_size < _max_cache_size) {
+  if (this->_cache_size < this->_max_cache_size) {
     // Add the current segment to the cache.
     DEBUG_ONLY(zap_segment(_cur_seg, false);)
     _cache = set_link(_cur_seg, _cache);
-    ++_cache_size;
+    ++this->_cache_size;
   } else {
     DEBUG_ONLY(zap_segment(_cur_seg, true);)
     free(_cur_seg, segment_bytes());
   }
   const bool at_empty_transition = prev == NULL;
-  _cur_seg = prev;
-  _cur_seg_size = _seg_size;
-  _full_seg_size -= at_empty_transition ? 0 : _seg_size;
+  this->_cur_seg = prev;
+  this->_cur_seg_size = this->_seg_size;
+  this->_full_seg_size -= at_empty_transition ? 0 : this->_seg_size;
   DEBUG_ONLY(verify(at_empty_transition);)
 }

-template <class E>
-void Stack<E>::free_segments(E* seg)
+template <class E, MEMFLAGS F>
+void Stack<E, F>::free_segments(E* seg)
 {
   const size_t bytes = segment_bytes();
   while (seg != NULL) {
     E* const prev = get_link(seg);
     free(seg, bytes);
     seg = prev;
   }
 }

-template <class E>
-void Stack<E>::reset(bool reset_cache)
+template <class E, MEMFLAGS F>
+void Stack<E, F>::reset(bool reset_cache)
 {
-  _cur_seg_size = _seg_size; // So push() will alloc a new segment.
-  _full_seg_size = 0;
+  this->_cur_seg_size = this->_seg_size; // So push() will alloc a new segment.
+  this->_full_seg_size = 0;
   _cur_seg = NULL;
   if (reset_cache) {
-    _cache_size = 0;
+    this->_cache_size = 0;
     _cache = NULL;
   }
 }

 #ifdef ASSERT
-template <class E>
-void Stack<E>::verify(bool at_empty_transition) const
+template <class E, MEMFLAGS F>
+void Stack<E, F>::verify(bool at_empty_transition) const
 {
-  assert(size() <= max_size(), "stack exceeded bounds");
-  assert(cache_size() <= max_cache_size(), "cache exceeded bounds");
-  assert(_cur_seg_size <= segment_size(), "segment index exceeded bounds");
+  assert(size() <= this->max_size(), "stack exceeded bounds");
+  assert(this->cache_size() <= this->max_cache_size(), "cache exceeded bounds");
+  assert(this->_cur_seg_size <= this->segment_size(), "segment index exceeded bounds");

-  assert(_full_seg_size % _seg_size == 0, "not a multiple");
+  assert(this->_full_seg_size % this->_seg_size == 0, "not a multiple");
   assert(at_empty_transition || is_empty() == (size() == 0), "mismatch");
-  assert((_cache == NULL) == (cache_size() == 0), "mismatch");
+  assert((_cache == NULL) == (this->cache_size() == 0), "mismatch");

   if (is_empty()) {
-    assert(_cur_seg_size == segment_size(), "sanity");
+    assert(this->_cur_seg_size == this->segment_size(), "sanity");
   }
 }

-template <class E>
-void Stack<E>::zap_segment(E* seg, bool zap_link_field) const
+template <class E, MEMFLAGS F>
+void Stack<E, F>::zap_segment(E* seg, bool zap_link_field) const
 {
   if (!ZapStackSegments) return;
   const size_t zap_bytes = segment_bytes() - (zap_link_field ? 0 : sizeof(E*));
   uint32_t* cur = (uint32_t*)seg;
   const uint32_t* end = cur + zap_bytes / sizeof(uint32_t);
   while (cur < end) {
     *cur++ = 0xfadfaded;
   }
 }
 #endif

-template <class E>
-E* ResourceStack<E>::alloc(size_t bytes)
+template <class E, MEMFLAGS F>
+E* ResourceStack<E, F>::alloc(size_t bytes)
 {
   return (E*) resource_allocate_bytes(bytes);
 }

-template <class E>
-void ResourceStack<E>::free(E* addr, size_t bytes)
+template <class E, MEMFLAGS F>
+void ResourceStack<E, F>::free(E* addr, size_t bytes)
 {
   resource_free_bytes((char*) addr, bytes);
 }

-template <class E>
-void StackIterator<E>::sync()
+template <class E, MEMFLAGS F>
+void StackIterator<E, F>::sync()
 {
   _full_seg_size = _stack._full_seg_size;
   _cur_seg_size = _stack._cur_seg_size;
   _cur_seg = _stack._cur_seg;
 }

-template <class E>
-E* StackIterator<E>::next_addr()
+template <class E, MEMFLAGS F>
+E* StackIterator<E, F>::next_addr()
 {
   assert(!is_empty(), "no items left");
   if (_cur_seg_size == 1) {
     E* addr = _cur_seg;
     _cur_seg = _stack.get_link(_cur_seg);