comparison src/share/vm/memory/allocation.hpp @ 6872:7b5885dadbdc

8000617: It should be possible to allocate memory without the VM dying.
Reviewed-by: coleenp, kamg

author:   nloodin
date:     Wed, 17 Oct 2012 17:36:48 +0200
parents:  fb19af007ffc
children: 716c64bda5ba
comparing: 6871:045cb62046a7 with 6872:7b5885dadbdc
@@ -50,10 +50,16 @@
 #define _NOINLINE_
 #else
 #define _NOINLINE_ __attribute__ ((noinline))
 #endif
 #endif
+
+class AllocFailStrategy {
+public:
+  enum AllocFailEnum { EXIT_OOM, RETURN_NULL };
+};
+typedef AllocFailStrategy::AllocFailEnum AllocFailType;
 
 // All classes in the virtual machine must be subclassed
 // by one of the following allocation classes:
 //
 // For objects allocated in the resource area (see resourceArea.hpp).
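The enum above is the whole mechanism of this change: each allocation entry point below gains a trailing AllocFailType parameter defaulting to AllocFailStrategy::EXIT_OOM, so existing call sites keep the historical die-on-OOM behavior, while a caller that can tolerate failure passes RETURN_NULL explicitly. A minimal sketch of the pattern (some_alloc is a placeholder name, not a function from this changeset):

    // Default argument preserves the old semantics:
    void* a = some_alloc(64);                                  // exits the VM on OOM
    // Opting in to recoverable failure:
    void* b = some_alloc(64, AllocFailStrategy::RETURN_NULL);  // NULL on OOM
    if (b == NULL) {
      // back out gracefully instead of dying
    }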
@@ -313,11 +319,12 @@
   friend class VMStructs;
 
   Chunk *_first;                // First chunk
   Chunk *_chunk;                // current chunk
   char *_hwm, *_max;            // High water mark and max in current chunk
-  void* grow(size_t x);         // Get a new Chunk of at least size x
+  // Get a new Chunk of at least size x
+  void* grow(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
   size_t _size_in_bytes;        // Size of arena (used for native memory tracking)
 
   NOT_PRODUCT(static julong _bytes_allocated;) // total #bytes allocated since start
   friend class AllocStats;
   debug_only(void* malloc(size_t size);)
@@ -348,53 +355,53 @@
   void* operator new(size_t size, MEMFLAGS flags);
   void* operator new(size_t size, const std::nothrow_t& nothrow_constant, MEMFLAGS flags);
   void  operator delete(void* p);
 
   // Fast allocate in the arena.  Common case is: pointer test + increment.
-  void* Amalloc(size_t x) {
+  void* Amalloc(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
     assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2");
     x = ARENA_ALIGN(x);
     debug_only(if (UseMallocOnly) return malloc(x);)
     check_for_overflow(x, "Arena::Amalloc");
     NOT_PRODUCT(inc_bytes_allocated(x);)
     if (_hwm + x > _max) {
-      return grow(x);
+      return grow(x, alloc_failmode);
     } else {
       char *old = _hwm;
       _hwm += x;
       return old;
     }
   }
   // Further assume size is padded out to words
-  void *Amalloc_4(size_t x) {
+  void *Amalloc_4(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
     assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
     debug_only(if (UseMallocOnly) return malloc(x);)
     check_for_overflow(x, "Arena::Amalloc_4");
     NOT_PRODUCT(inc_bytes_allocated(x);)
     if (_hwm + x > _max) {
-      return grow(x);
+      return grow(x, alloc_failmode);
     } else {
       char *old = _hwm;
       _hwm += x;
       return old;
     }
   }
 
   // Allocate with 'double' alignment. It is 8 bytes on sparc.
   // In other cases Amalloc_D() should be the same as Amalloc_4().
-  void* Amalloc_D(size_t x) {
+  void* Amalloc_D(size_t x, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
     assert( (x&(sizeof(char*)-1)) == 0, "misaligned size" );
     debug_only(if (UseMallocOnly) return malloc(x);)
 #if defined(SPARC) && !defined(_LP64)
 #define DALIGN_M1 7
     size_t delta = (((size_t)_hwm + DALIGN_M1) & ~DALIGN_M1) - (size_t)_hwm;
     x += delta;
 #endif
     check_for_overflow(x, "Arena::Amalloc_D");
     NOT_PRODUCT(inc_bytes_allocated(x);)
     if (_hwm + x > _max) {
-      return grow(x); // grow() returns a result aligned >= 8 bytes.
+      return grow(x, alloc_failmode); // grow() returns a result aligned >= 8 bytes.
     } else {
       char *old = _hwm;
       _hwm += x;
 #if defined(SPARC) && !defined(_LP64)
       old += delta; // align to 8-bytes
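All three Amalloc variants now forward the failure mode to grow(), the only place a fresh chunk is obtained; the fast path (pointer test + increment) never consults the mode. A caller-side sketch, assuming a live Arena* named arena and a byte count n (both names are illustrative):

    char* buf = (char*) arena->Amalloc(n, AllocFailStrategy::RETURN_NULL);
    if (buf == NULL) {
      // grow() could not get a new chunk; report failure upward
      return false;
    }
    // With the default EXIT_OOM mode, a NULL check is pointless:
    // on failure grow() never returns.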
@@ -410,11 +417,12 @@
     if (UseMallocOnly) return;
 #endif
     if (((char*)ptr) + size == _hwm) _hwm = (char*)ptr;
   }
 
-  void *Arealloc( void *old_ptr, size_t old_size, size_t new_size );
+  void *Arealloc( void *old_ptr, size_t old_size, size_t new_size,
+      AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
 
   // Move contents of this arena into an empty arena
   Arena *move_contents(Arena *empty_arena);
 
   // Determine if pointer belongs to this Arena or not.
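Arealloc gets the same treatment, so resizing within an arena can also fail softly. A sketch (old_buf, old_len, and new_len are illustrative; whether the old block stays intact on failure is not specified by this header and should be confirmed against Arealloc's body in allocation.cpp):

    char* bigger = (char*) arena->Arealloc(old_buf, old_len, new_len,
                                           AllocFailStrategy::RETURN_NULL);
    if (bigger == NULL) {
      // resize failed; treat this operation as unsuccessful
    }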
@@ -456,13 +464,16 @@
 #define NEW_ARENA_OBJ(arena, type) \
   NEW_ARENA_ARRAY(arena, type, 1)
 
 
 //%note allocation_1
-extern char* resource_allocate_bytes(size_t size);
-extern char* resource_allocate_bytes(Thread* thread, size_t size);
-extern char* resource_reallocate_bytes( char *old, size_t old_size, size_t new_size);
+extern char* resource_allocate_bytes(size_t size,
+    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
+extern char* resource_allocate_bytes(Thread* thread, size_t size,
+    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
+extern char* resource_reallocate_bytes( char *old, size_t old_size, size_t new_size,
+    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
 extern void resource_free_bytes( char *old, size_t size );
 
 //----------------------------------------------------------------------
 // Base class for objects allocated in the resource area per default.
 // Optionally, objects may be allocated on the C heap with
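The resource-area entry points gain the same parameter, which is what enables the nothrow ResourceObj::operator new added below. A sketch of direct use from the current thread (len is an illustrative name):

    char* scratch = resource_allocate_bytes(len, AllocFailStrategy::RETURN_NULL);
    if (scratch == NULL) {
      // the resource area could not grow; skip the optional work
    }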
@@ -494,20 +505,29 @@
   ~ResourceObj();
 #endif // ASSERT
 
  public:
   void* operator new(size_t size, allocation_type type, MEMFLAGS flags);
+  void* operator new(size_t size, const std::nothrow_t& nothrow_constant,
+      allocation_type type, MEMFLAGS flags);
   void* operator new(size_t size, Arena *arena) {
       address res = (address)arena->Amalloc(size);
       DEBUG_ONLY(set_allocation_type(res, ARENA);)
       return res;
   }
   void* operator new(size_t size) {
       address res = (address)resource_allocate_bytes(size);
       DEBUG_ONLY(set_allocation_type(res, RESOURCE_AREA);)
       return res;
   }
+
+  void* operator new(size_t size, const std::nothrow_t& nothrow_constant) {
+      address res = (address)resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL);
+      DEBUG_ONLY(if (res != NULL) set_allocation_type(res, RESOURCE_AREA);)
+      return res;
+  }
+
   void  operator delete(void* p);
 };
 
 // One of the following macros must be used when allocating an array
 // or object to determine whether it should reside in the C heap on in
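With the nothrow overload in place, a ResourceObj subclass can be allocated without risking VM exit. C++ guarantees that when an allocation function selected by new (std::nothrow) returns NULL, the constructor is skipped and the new-expression itself yields NULL, so checking the result is well defined. A sketch with a hypothetical subclass Foo (not a class from this changeset):

    class Foo : public ResourceObj {
     public:
      int _x;
      Foo() : _x(0) {}
    };

    Foo* f = new (std::nothrow) Foo();   // RETURN_NULL path in the resource area
    if (f == NULL) {
      // allocation failed; recover instead of aborting the VM
    }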