comparison src/share/vm/memory/allocation.hpp @ 0:a61af66fc99e jdk7-b24

Initial load
author duke
date Sat, 01 Dec 2007 00:00:00 +0000
parents
children 37f87013dfd8
1 /*
2 * Copyright 1997-2005 Sun Microsystems, Inc. All Rights Reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
20 * CA 95054 USA or visit www.sun.com if you need additional information or
21 * have any questions.
22 *
23 */
24
25 #define ARENA_ALIGN_M1 (((size_t)(ARENA_AMALLOC_ALIGNMENT)) - 1)
26 #define ARENA_ALIGN_MASK (~((size_t)ARENA_ALIGN_M1))
27 #define ARENA_ALIGN(x) ((((size_t)(x)) + ARENA_ALIGN_M1) & ARENA_ALIGN_MASK)
28
29 // All classes in the virtual machine must derive from one of the
30 // following allocation classes:
31 //
32 // For objects allocated in the resource area (see resourceArea.hpp).
33 // - ResourceObj
34 //
35 // For objects allocated in the C-heap (managed by: free & malloc).
36 // - CHeapObj
37 //
38 // For objects allocated on the stack.
39 // - StackObj
40 //
41 // For embedded objects.
42 // - ValueObj
43 //
44 // For classes used as name spaces.
45 // - AllStatic
46 //
47 // The printable subclasses are used for debugging and define virtual
48 // member functions for printing. Classes that need to avoid the vtbl
49 // pointer overhead in their objects should therefore not derive from
50 // the printable subclasses.
51 //
52 // The following macros and function should be used to allocate memory
53 // directly in the resource area or in the C-heap:
54 //
55 // NEW_RESOURCE_ARRAY(type,size)
56 // NEW_RESOURCE_OBJ(type)
57 // NEW_C_HEAP_ARRAY(type,size)
58 // NEW_C_HEAP_OBJ(type)
59 // char* AllocateHeap(size_t size, const char* name);
60 // void FreeHeap(void* p);
61 //
62 // C-heap allocation can be traced using +PrintHeapAllocation.
63 // malloc and free should therefore never be called directly.
64
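// Example usage of the macros and functions above (an illustrative sketch;
// `n` is a caller-supplied element count, and a ResourceMark must be active
// for the resource-area allocations):
//
//   int*  tmp = NEW_RESOURCE_ARRAY(int, n);   // reclaimed by the enclosing ResourceMark
//   char* buf = NEW_C_HEAP_ARRAY(char, n);    // must be released with FREE_C_HEAP_ARRAY
//   FREE_C_HEAP_ARRAY(char, buf);
//
//   char* raw = AllocateHeap(1024, "example buffer");  // traced by +PrintHeapAllocation
//   FreeHeap(raw);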
65 // Base class for objects allocated in the C-heap.
66
67 // In non-product mode we introduce a superclass for all allocation classes
68 // that supports printing.
69 // We avoid the superclass in product mode since some C++ compilers add
70 // a word of overhead for empty superclasses.
71
72 #ifdef PRODUCT
73 #define ALLOCATION_SUPER_CLASS_SPEC
74 #else
75 #define ALLOCATION_SUPER_CLASS_SPEC : public AllocatedObj
76 class AllocatedObj {
77 public:
78 // Printing support
79 void print() const;
80 void print_value() const;
81
82 virtual void print_on(outputStream* st) const;
83 virtual void print_value_on(outputStream* st) const;
84 };
85 #endif
86
87 class CHeapObj ALLOCATION_SUPER_CLASS_SPEC {
88 public:
89 void* operator new(size_t size);
90 void operator delete(void* p);
91 void* new_array(size_t size);
92 };
93
94 // Base class for objects allocated on the stack only.
95 // Calling new or delete will result in a fatal error.
96
97 class StackObj ALLOCATION_SUPER_CLASS_SPEC {
98 public:
99 void* operator new(size_t size);
100 void operator delete(void* p);
101 };
102
103 // Base class for objects used as value objects.
104 // Calling new or delete will result in a fatal error.
105 //
106 // Portability note: Certain compilers (e.g. gcc) will
107 // always make classes bigger if they have a superclass, even
108 // if the superclass does not have any virtual methods or
109 // instance fields. The HotSpot implementation relies on this
110 // not happening. So never make a ValueObj class a direct subclass
111 // of _ValueObj; use the VALUE_OBJ_CLASS_SPEC macro instead, e.g.,
112 // like this:
113 //
114 // class A VALUE_OBJ_CLASS_SPEC {
115 // ...
116 // }
117 //
118 // With gcc and possibly other compilers, VALUE_OBJ_CLASS_SPEC can
119 // be defined as an empty string "".
120 //
121 class _ValueObj {
122 public:
123 void* operator new(size_t size);
124 void operator delete(void* p);
125 };
126
127 // Base class for classes that constitute name spaces.
128
129 class AllStatic {
130 public:
131 AllStatic() { ShouldNotCallThis(); }
132 ~AllStatic() { ShouldNotCallThis(); }
133 };
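// Example declarations using these allocation base classes (an illustrative
// sketch; the class names below are hypothetical):
//
//   class MyHeapThing   : public CHeapObj { ... };   // new/delete on the C-heap
//   class MyStackHelper : public StackObj { ... };   // automatic (stack) storage only
//   class MyPair VALUE_OBJ_CLASS_SPEC     { ... };   // embedded in other objects
//   class MyConstants   : AllStatic       { ... };   // holder of statics; never instantiated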
134
135
136 //------------------------------Chunk------------------------------------------
137 // Linked list of raw memory chunks
138 class Chunk: public CHeapObj {
139 protected:
140 Chunk* _next; // Next Chunk in list
141 const size_t _len; // Size of this Chunk
142 public:
143 void* operator new(size_t size, size_t length);
144 void operator delete(void* p);
145 Chunk(size_t length);
146
147 enum {
148 // default sizes; make them slightly smaller than 2**k to guard against
149 // buddy-system style malloc implementations
150 #ifdef _LP64
151 slack = 40, // [RGV] Not sure if this is right, but make it
152 // a multiple of 8.
153 #else
154 slack = 20, // suspected sizeof(Chunk) + internal malloc headers
155 #endif
156
157 init_size = 1*K - slack, // Size of first chunk
158 medium_size= 10*K - slack, // Size of medium-sized chunk
159 size = 32*K - slack, // Default size of an Arena chunk (following the first)
160 non_pool_size = init_size + 32 // An initial size which is not one of above
161 };
162
163 void chop(); // Chop this chunk
164 void next_chop(); // Chop next chunk
165 static size_t aligned_overhead_size(void) { return ARENA_ALIGN(sizeof(Chunk)); }
166
167 size_t length() const { return _len; }
168 Chunk* next() const { return _next; }
169 void set_next(Chunk* n) { _next = n; }
170 // Boundaries of data area (possibly unused)
171 char* bottom() const { return ((char*) this) + aligned_overhead_size(); }
172 char* top() const { return bottom() + _len; }
173 bool contains(char* p) const { return bottom() <= p && p <= top(); }
174
175 // Start the chunk_pool cleaner task
176 static void start_chunk_pool_cleaner_task();
177 };
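// Example: walking a chunk list (an illustrative sketch; `first` is a
// hypothetical head pointer):
//
//   size_t total = 0;
//   for (Chunk* c = first; c != NULL; c = c->next()) {
//     total += c->length();                      // payload bytes, header excluded
//     assert(c->contains(c->bottom()), "data area starts at bottom()");
//   }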
178
179
180 //------------------------------Arena------------------------------------------
181 // Fast allocation of memory
182 class Arena: public CHeapObj {
183 protected:
184 friend class ResourceMark;
185 friend class HandleMark;
186 friend class NoHandleMark;
187 Chunk *_first; // First chunk
188 Chunk *_chunk; // current chunk
189 char *_hwm, *_max; // High water mark and max in current chunk
190 void* grow(size_t x); // Get a new Chunk of at least size x
191 NOT_PRODUCT(size_t _size_in_bytes;) // Size of arena (used for memory usage tracing)
192 NOT_PRODUCT(static size_t _bytes_allocated;) // total #bytes allocated since start
193 friend class AllocStats;
194 debug_only(void* malloc(size_t size);)
195 debug_only(void* internal_malloc_4(size_t x);)
196 public:
197 Arena();
198 Arena(size_t init_size);
199 Arena(Arena *old);
200 ~Arena();
201 void destruct_contents();
202 char* hwm() const { return _hwm; }
203
204 // Fast allocate in the arena. Common case is: pointer test + increment.
205 void* Amalloc(size_t x) {
206 assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2");
207 x = ARENA_ALIGN(x);
208 debug_only(if (UseMallocOnly) return malloc(x);)
209 NOT_PRODUCT(_bytes_allocated += x);
210 if (_hwm + x > _max) {
211 return grow(x);
212 } else {
213 char *old = _hwm;
214 _hwm += x;
215 return old;
216 }
217 }
218 // Further assume size is padded out to words
219 void *Amalloc_4(size_t x) {
220 assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
221 debug_only(if (UseMallocOnly) return malloc(x);)
222 NOT_PRODUCT(_bytes_allocated += x);
223 if (_hwm + x > _max) {
224 return grow(x);
225 } else {
226 char *old = _hwm;
227 _hwm += x;
228 return old;
229 }
230 }
231
232 // Allocate with 'double' alignment. It is 8 bytes on sparc.
233 // In other cases Amalloc_D() should be the same as Amalloc_4().
234 void* Amalloc_D(size_t x) {
235 assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
236 debug_only(if (UseMallocOnly) return malloc(x);)
237 #if defined(SPARC) && !defined(_LP64)
238 #define DALIGN_M1 7
239 size_t delta = (((size_t)_hwm + DALIGN_M1) & ~DALIGN_M1) - (size_t)_hwm;
240 x += delta;
241 #endif
242 NOT_PRODUCT(_bytes_allocated += x);
243 if (_hwm + x > _max) {
244 return grow(x); // grow() returns a result aligned >= 8 bytes.
245 } else {
246 char *old = _hwm;
247 _hwm += x;
248 #if defined(SPARC) && !defined(_LP64)
249 old += delta; // align to 8-bytes
250 #endif
251 return old;
252 }
253 }
254
255 // Fast delete in the arena. Common case is: NOP (except for storage reclaimed)
256 void Afree(void *ptr, size_t size) {
257 #ifdef ASSERT
258 if (ZapResourceArea) memset(ptr, badResourceValue, size); // zap freed memory
259 if (UseMallocOnly) return;
260 #endif
261 if (((char*)ptr) + size == _hwm) _hwm = (char*)ptr;
262 }
263
264 void *Arealloc( void *old_ptr, size_t old_size, size_t new_size );
265
266 // Move contents of this arena into an empty arena
267 Arena *move_contents(Arena *empty_arena);
268
269 // Determine if pointer belongs to this Arena or not.
270 bool contains( const void *ptr ) const;
271
272 // Total of all chunks in use (not thread-safe)
273 size_t used() const;
274
275 // Total # of bytes used
276 size_t size_in_bytes() const NOT_PRODUCT({ return _size_in_bytes; }) PRODUCT_RETURN0;
277 void set_size_in_bytes(size_t size) NOT_PRODUCT({ _size_in_bytes = size; }) PRODUCT_RETURN;
278 static void free_malloced_objects(Chunk* chunk, char* hwm, char* max, char* hwm2) PRODUCT_RETURN;
279 static void free_all(char** start, char** end) PRODUCT_RETURN;
280
281 private:
282 // Reset this Arena to empty, access will trigger grow if necessary
283 void reset(void) {
284 _first = _chunk = NULL;
285 _hwm = _max = NULL;
286 }
287 };
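// Example usage (an illustrative sketch; every allocation below is reclaimed
// in one step when the Arena is reset or destroyed):
//
//   Arena arena;
//   char* buf  = (char*) arena.Amalloc(100);               // size rounded up by ARENA_ALIGN
//   int*  ints = (int*)  arena.Amalloc_4(8 * sizeof(int)); // size must already be word-aligned
//   buf = (char*) arena.Arealloc(buf, 100, 200);
//   arena.Afree(buf, 200);   // reclaims storage only if buf was the most recent allocation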
288
289 // One of the following macros must be used when allocating
290 // an array or object from an arena
291 #define NEW_ARENA_ARRAY(arena, type, size)\
292 (type*) arena->Amalloc((size) * sizeof(type))
293
294 #define REALLOC_ARENA_ARRAY(arena, type, old, old_size, new_size)\
295 (type*) arena->Arealloc((char*)(old), (old_size) * sizeof(type), (new_size) * sizeof(type) )
296
297 #define FREE_ARENA_ARRAY(arena, type, old, size)\
298 arena->Afree((char*)(old), (size) * sizeof(type))
299
300 #define NEW_ARENA_OBJ(arena, type)\
301 NEW_ARENA_ARRAY(arena, type, 1)
302
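// Example usage of the arena macros (an illustrative sketch; `arena` is a
// hypothetical Arena*, `n` a caller-supplied element count, and `Node` a
// hypothetical type):
//
//   int* a = NEW_ARENA_ARRAY(arena, int, n);
//   a      = REALLOC_ARENA_ARRAY(arena, int, a, n, 2 * n);
//   FREE_ARENA_ARRAY(arena, int, a, 2 * n);
//   Node* node = NEW_ARENA_OBJ(arena, Node);   // raw storage; no constructor is run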
303
304 //%note allocation_1
305 extern char* resource_allocate_bytes(size_t size);
306 extern char* resource_allocate_bytes(Thread* thread, size_t size);
307 extern char* resource_reallocate_bytes( char *old, size_t old_size, size_t new_size);
308 extern void resource_free_bytes( char *old, size_t size );
309
310 //----------------------------------------------------------------------
311 // Base class for objects allocated in the resource area by default.
312 // Optionally, objects may be allocated on the C heap with
313 // new(ResourceObj::C_HEAP) Foo(...) or in an Arena with new (&arena) Foo(...).
314 // ResourceObj's can also be embedded within other objects, but then don't use
315 // new or delete (the allocation_type is unknown). If new is used to allocate,
316 // use delete to deallocate.
317 class ResourceObj ALLOCATION_SUPER_CLASS_SPEC {
318 public:
319 enum allocation_type { UNKNOWN = 0, C_HEAP, RESOURCE_AREA, ARENA };
320 #ifdef ASSERT
321 private:
322 allocation_type _allocation;
323 public:
324 bool allocated_on_C_heap() { return _allocation == C_HEAP; }
325 #endif // ASSERT
326
327 public:
328 void* operator new(size_t size, allocation_type type);
329 void* operator new(size_t size, Arena *arena) {
330 address res = (address)arena->Amalloc(size);
331 // Set allocation type in the resource object
332 DEBUG_ONLY(((ResourceObj *)res)->_allocation = ARENA;)
333 return res;
334 }
335 void* operator new(size_t size) {
336 address res = (address)resource_allocate_bytes(size);
337 // Set allocation type in the resource object
338 DEBUG_ONLY(((ResourceObj *)res)->_allocation = RESOURCE_AREA;)
339 return res;
340 }
341 void operator delete(void* p);
342 };
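// Example: the three placements of a ResourceObj subclass (an illustrative
// sketch; `Foo` and `arena` are hypothetical):
//
//   Foo* r = new Foo();                          // resource area (default); freed by ResourceMark
//   Foo* c = new (ResourceObj::C_HEAP) Foo();    // C heap; delete explicitly
//   Foo* a = new (&arena) Foo();                 // arena; reclaimed with the arena, no delete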
343
344 // One of the following macros must be used when allocating an array
345 // or object to determine whether it should reside in the C heap or in
346 // the resource area.
347
348 #define NEW_RESOURCE_ARRAY(type, size)\
349 (type*) resource_allocate_bytes((size) * sizeof(type))
350
351 #define NEW_RESOURCE_ARRAY_IN_THREAD(thread, type, size)\
352 (type*) resource_allocate_bytes(thread, (size) * sizeof(type))
353
354 #define REALLOC_RESOURCE_ARRAY(type, old, old_size, new_size)\
355 (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type), (new_size) * sizeof(type) )
356
357 #define FREE_RESOURCE_ARRAY(type, old, size)\
358 resource_free_bytes((char*)(old), (size) * sizeof(type))
359
360 #define FREE_FAST(old)\
361 /* nop */
362
363 #define NEW_RESOURCE_OBJ(type)\
364 NEW_RESOURCE_ARRAY(type, 1)
365
366 #define NEW_C_HEAP_ARRAY(type, size)\
367 (type*) (AllocateHeap((size) * sizeof(type), XSTR(type) " in " __FILE__))
368
369 #define REALLOC_C_HEAP_ARRAY(type, old, size)\
370 (type*) (ReallocateHeap((char*)old, (size) * sizeof(type), XSTR(type) " in " __FILE__))
371
372 #define FREE_C_HEAP_ARRAY(type,old) \
373 FreeHeap((char*)(old))
374
375 #define NEW_C_HEAP_OBJ(type)\
376 NEW_C_HEAP_ARRAY(type, 1)
377
378 extern bool warn_new_operator;
379
380 // for statistics
381 #ifndef PRODUCT
382 class AllocStats : StackObj {
383 int start_mallocs, start_frees;
384 size_t start_malloc_bytes, start_res_bytes;
385 public:
386 AllocStats();
387
388 int num_mallocs(); // since creation of receiver
389 size_t alloc_bytes();
390 size_t resource_bytes();
391 int num_frees();
392 void print();
393 };
394 #endif
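// Example usage (an illustrative sketch, non-product builds only):
//
//   AllocStats stats;      // snapshots the malloc/resource counters at construction
//   ... allocating work ...
//   stats.print();         // reports the deltas accumulated since construction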
395
396
397 //------------------------------ReallocMark---------------------------------
398 // Code which uses REALLOC_RESOURCE_ARRAY should check an associated
399 // ReallocMark, which is declared in the same scope as the reallocated
400 // pointer. Any operation that could __potentially__ cause a reallocation
401 // should check the ReallocMark.
402 class ReallocMark: public StackObj {
403 protected:
404 NOT_PRODUCT(int _nesting;)
405
406 public:
407 ReallocMark() PRODUCT_RETURN;
408 void check() PRODUCT_RETURN;
409 };