comparison src/share/vm/memory/allocation.hpp @ 2250:f7de3327c683

7017124: Fix some VM stats to avoid 32-bit overflow
Summary: Added new method inc_stat_counter() to increment long statistic values and use atomic long load and store.
Reviewed-by: dholmes, jrose, phh, never
author kvn
date Mon, 07 Feb 2011 10:34:39 -0800
parents 79d8657be916
children 4a9604cd7c5f
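
The inc_stat_counter() helper named in the summary is not part of this header's diff (in this changeset it lands in allocation.inline.hpp). What follows is only a sketch of the idea the summary describes, a julong counter updated through an atomic 64-bit load and store so the value cannot tear on 32-bit platforms; the platform guard and the exact Atomic overloads used are assumptions, not a quote of the committed patch:

// Sketch, not the committed code: bump a 64-bit statistics counter without
// word-tearing on 32-bit VMs. Assumes the jlong overloads of Atomic::load()
// and Atomic::store() declared in runtime/atomic.hpp.
inline void inc_stat_counter(volatile julong* dest, julong add_value) {
#if defined(SPARC) || defined(X86)
  // These platforms have atomic 8-byte load/store instructions.
  julong value = Atomic::load((volatile jlong*)dest);
  value += add_value;
  Atomic::store((jlong)value, (volatile jlong*)dest);
#else
  // Plain read-modify-write; a torn or lost update is tolerable for statistics.
  *dest += add_value;
#endif
}

Lost increments under contention are still possible, but for allocation statistics the point of the atomic load/store is simply that readers never see half of a 64-bit value.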
diff -r 3763ca6579b7 -r f7de3327c683 src/share/vm/memory/allocation.hpp
--- a/src/share/vm/memory/allocation.hpp
+++ b/src/share/vm/memory/allocation.hpp
@@ -1,7 +1,7 @@
 /*
- * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 only, as
  * published by the Free Software Foundation.
@@ -200,14 +200,15 @@
   Chunk *_first; // First chunk
   Chunk *_chunk; // current chunk
   char *_hwm, *_max; // High water mark and max in current chunk
   void* grow(size_t x); // Get a new Chunk of at least size x
   NOT_PRODUCT(size_t _size_in_bytes;) // Size of arena (used for memory usage tracing)
-  NOT_PRODUCT(static size_t _bytes_allocated;) // total #bytes allocated since start
+  NOT_PRODUCT(static julong _bytes_allocated;) // total #bytes allocated since start
   friend class AllocStats;
   debug_only(void* malloc(size_t size);)
   debug_only(void* internal_malloc_4(size_t x);)
+  NOT_PRODUCT(void inc_bytes_allocated(size_t x);)
  public:
   Arena();
   Arena(size_t init_size);
   Arena(Arena *old);
   ~Arena();
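
The new Arena::inc_bytes_allocated() is only declared in this header; its body is not shown in this comparison. Presumably it just funnels the arena's running total through the new counter helper, roughly like the following sketch (the allocation.cpp location is an assumption):

// Sketch under the assumption that the definition sits in allocation.cpp
// next to the other non-product Arena code.
#ifndef PRODUCT
void Arena::inc_bytes_allocated(size_t x) {
  // Widen to julong and update the static total with the tear-free helper.
  inc_stat_counter(&_bytes_allocated, x);
}
#endif

In product builds the NOT_PRODUCT wrapper compiles the call away entirely, so the Amalloc fast paths below remain a pointer test plus an increment.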
@@ -217,11 +218,11 @@
   // Fast allocate in the arena. Common case is: pointer test + increment.
   void* Amalloc(size_t x) {
     assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2");
     x = ARENA_ALIGN(x);
     debug_only(if (UseMallocOnly) return malloc(x);)
-    NOT_PRODUCT(_bytes_allocated += x);
+    NOT_PRODUCT(inc_bytes_allocated(x);)
     if (_hwm + x > _max) {
       return grow(x);
     } else {
       char *old = _hwm;
       _hwm += x;
@@ -230,11 +231,11 @@
   }
   // Further assume size is padded out to words
   void *Amalloc_4(size_t x) {
     assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
     debug_only(if (UseMallocOnly) return malloc(x);)
-    NOT_PRODUCT(_bytes_allocated += x);
+    NOT_PRODUCT(inc_bytes_allocated(x);)
     if (_hwm + x > _max) {
       return grow(x);
     } else {
       char *old = _hwm;
       _hwm += x;
@@ -250,11 +251,11 @@
 #if defined(SPARC) && !defined(_LP64)
 #define DALIGN_M1 7
     size_t delta = (((size_t)_hwm + DALIGN_M1) & ~DALIGN_M1) - (size_t)_hwm;
     x += delta;
 #endif
-    NOT_PRODUCT(_bytes_allocated += x);
+    NOT_PRODUCT(inc_bytes_allocated(x);)
     if (_hwm + x > _max) {
       return grow(x); // grow() returns a result aligned >= 8 bytes.
     } else {
       char *old = _hwm;
       _hwm += x;
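
The SPARC-specific padding above is unchanged by this fix, but since this allocator's whole job is the ">= 8 bytes" alignment guarantee, here is a tiny standalone check of the DALIGN_M1 rounding with made-up addresses (not HotSpot code):

#include <cassert>
#include <cstddef>

int main() {
  const size_t DALIGN_M1 = 7;                              // 8-byte alignment mask minus one
  size_t hwm = 0x1005;                                     // pretend high-water mark, misaligned
  size_t delta = ((hwm + DALIGN_M1) & ~DALIGN_M1) - hwm;   // round up to 0x1008, keep the pad
  assert(delta == 3);                                      // request grows by 3 bytes of padding
  size_t aligned_hwm = 0x1008;                             // already on an 8-byte boundary
  assert((((aligned_hwm + DALIGN_M1) & ~DALIGN_M1) - aligned_hwm) == 0); // no padding needed
  return 0;
}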
@@ -404,19 +405,20 @@
 extern bool warn_new_operator;

 // for statistics
 #ifndef PRODUCT
 class AllocStats : StackObj {
-  int start_mallocs, start_frees;
-  size_t start_malloc_bytes, start_res_bytes;
+  julong start_mallocs, start_frees;
+  julong start_malloc_bytes, start_mfree_bytes, start_res_bytes;
  public:
   AllocStats();

-  int num_mallocs(); // since creation of receiver
-  size_t alloc_bytes();
-  size_t resource_bytes();
-  int num_frees();
+  julong num_mallocs(); // since creation of receiver
+  julong alloc_bytes();
+  julong num_frees();
+  julong free_bytes();
+  julong resource_bytes();
   void print();
 };
 #endif
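
The AllocStats member functions themselves live in allocation.cpp and are outside this header comparison. A sketch of how the widened fields would be used, assuming the usual non-product totals os::num_mallocs, os::alloc_bytes, os::num_frees, os::free_bytes and the Arena::_bytes_allocated counter above (the names not visible in this diff are assumptions):

// Sketch only: deltas since the AllocStats object was constructed, now julong
// so they keep counting past 2^32 bytes or 2^31 events on 32-bit builds.
AllocStats::AllocStats() {
  start_mallocs      = os::num_mallocs;         // running malloc/realloc call count
  start_frees        = os::num_frees;           // running free call count
  start_malloc_bytes = os::alloc_bytes;         // running malloc'ed bytes
  start_mfree_bytes  = os::free_bytes;          // running freed bytes
  start_res_bytes    = Arena::_bytes_allocated; // running arena (resource) bytes
}

julong AllocStats::num_mallocs()    { return os::num_mallocs   - start_mallocs; }
julong AllocStats::alloc_bytes()    { return os::alloc_bytes   - start_malloc_bytes; }
julong AllocStats::num_frees()      { return os::num_frees     - start_frees; }
julong AllocStats::free_bytes()     { return os::free_bytes    - start_mfree_bytes; }
julong AllocStats::resource_bytes() { return Arena::_bytes_allocated - start_res_bytes; }

With every field and accessor now julong, a long-running 32-bit VM can report totals past the old int/size_t range without the counters wrapping, which is the overflow the bug title refers to.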