emacs: comparison of src/gmalloc.c @ 82308:7856255721ef
(_malloc_thread_enabled_p) [USE_PTHREAD]: New variable.
[USE_PTHREAD] (LOCK, UNLOCK, LOCK_ALIGNED_BLOCKS)
(UNLOCK_ALIGNED_BLOCKS): Conditionalize with it.
(malloc_atfork_handler_prepare, malloc_atfork_handler_parent)
(malloc_atfork_handler_child, malloc_enable_thread) [USE_PTHREAD]:
New functions.
author    YAMAMOTO Mitsuharu <mituharu@math.s.chiba-u.ac.jp>
date      Tue, 07 Aug 2007 08:55:43 +0000
parents   8cc259bc54fd
children  310b4cdcc703
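
The effect of the change is that gmalloc's locks stay disabled until
malloc_enable_thread is called, so single-threaded startup pays no locking
cost and the mutexes can be initialized before any contention is possible.
Below is a minimal sketch of how a program built with this allocator
(compiled with USE_PTHREAD) might use the new entry point; the worker
thread and its allocation are illustrative only and not part of the patch:

    #include <pthread.h>
    #include <stdlib.h>

    /* Declared by gmalloc under USE_PTHREAD (new lines 139-142).  */
    extern void malloc_enable_thread (void);

    /* Illustrative worker that allocates concurrently with other threads.  */
    static void *
    worker (void *arg)
    {
      void *p = malloc (1024);   /* guarded by _malloc_mutex once enabled */
      free (p);
      (void) arg;
      return NULL;
    }

    int
    main (void)
    {
      /* Call while still single-threaded: any malloc done internally by
         pthread_mutex_init cannot deadlock, because the LOCK macros are
         still no-ops while _malloc_thread_enabled_p is 0.  */
      malloc_enable_thread ();

      pthread_t t;
      pthread_create (&t, NULL, worker, NULL);
      pthread_join (t, NULL);
      return 0;
    }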
comparison: 82307:3a38f5a481f0 (old) | 82308:7856255721ef (new)

Rows with an empty "old" column were inserted in 82308; rows with an empty
"new" column were deleted; rows with both line numbers are unchanged.

 old | new |
-----|-----|------------------------------------------------------------------
 134 | 134 | /* Allocate SIZE bytes on a page boundary. */
 135 | 135 | #if ! (defined (_MALLOC_INTERNAL) && defined (GMALLOC_INHIBIT_VALLOC))
 136 | 136 | extern __ptr_t valloc PP ((__malloc_size_t __size));
 137 | 137 | #endif
 138 | 138 |
     | 139 | #ifdef USE_PTHREAD
     | 140 | /* Set up mutexes and make malloc etc. thread-safe. */
     | 141 | extern void malloc_enable_thread PP ((void));
     | 142 | #endif
 139 | 143 |
 140 | 144 | #ifdef _MALLOC_INTERNAL
 141 | 145 |
 142 | 146 | /* The allocator divides the heap into blocks of fixed size; large
 143 | 147 |    requests receive one or more whole blocks, and small requests
 ... | ... |
 240 | 244 | extern __ptr_t _realloc_internal_nolock PP ((__ptr_t __ptr, __malloc_size_t __size));
 241 | 245 | extern void _free_internal_nolock PP ((__ptr_t __ptr));
 242 | 246 |
 243 | 247 | #ifdef USE_PTHREAD
 244 | 248 | extern pthread_mutex_t _malloc_mutex, _aligned_blocks_mutex;
 245 |     | #define LOCK() pthread_mutex_lock (&_malloc_mutex)
 246 |     | #define UNLOCK() pthread_mutex_unlock (&_malloc_mutex)
 247 |     | #define LOCK_ALIGNED_BLOCKS() pthread_mutex_lock (&_aligned_blocks_mutex)
 248 |     | #define UNLOCK_ALIGNED_BLOCKS() pthread_mutex_unlock (&_aligned_blocks_mutex)
     | 249 | extern int _malloc_thread_enabled_p;
     | 250 | #define LOCK() \
     | 251 |   do { \
     | 252 |     if (_malloc_thread_enabled_p) \
     | 253 |       pthread_mutex_lock (&_malloc_mutex); \
     | 254 |   } while (0)
     | 255 | #define UNLOCK() \
     | 256 |   do { \
     | 257 |     if (_malloc_thread_enabled_p) \
     | 258 |       pthread_mutex_unlock (&_malloc_mutex); \
     | 259 |   } while (0)
     | 260 | #define LOCK_ALIGNED_BLOCKS() \
     | 261 |   do { \
     | 262 |     if (_malloc_thread_enabled_p) \
     | 263 |       pthread_mutex_lock (&_aligned_blocks_mutex); \
     | 264 |   } while (0)
     | 265 | #define UNLOCK_ALIGNED_BLOCKS() \
     | 266 |   do { \
     | 267 |     if (_malloc_thread_enabled_p) \
     | 268 |       pthread_mutex_unlock (&_aligned_blocks_mutex); \
     | 269 |   } while (0)
 249 | 270 | #else
 250 | 271 | #define LOCK()
 251 | 272 | #define UNLOCK()
 252 | 273 | #define LOCK_ALIGNED_BLOCKS()
 253 | 274 | #define UNLOCK_ALIGNED_BLOCKS()
 ... | ... |
 561 | 582 | }
 562 | 583 |
 563 | 584 | #ifdef USE_PTHREAD
 564 | 585 | pthread_mutex_t _malloc_mutex = PTHREAD_MUTEX_INITIALIZER;
 565 | 586 | pthread_mutex_t _aligned_blocks_mutex = PTHREAD_MUTEX_INITIALIZER;
     | 587 | int _malloc_thread_enabled_p;
     | 588 |
     | 589 | static void
     | 590 | malloc_atfork_handler_prepare ()
     | 591 | {
     | 592 |   LOCK ();
     | 593 |   LOCK_ALIGNED_BLOCKS ();
     | 594 | }
     | 595 |
     | 596 | static void
     | 597 | malloc_atfork_handler_parent ()
     | 598 | {
     | 599 |   UNLOCK_ALIGNED_BLOCKS ();
     | 600 |   UNLOCK ();
     | 601 | }
     | 602 |
     | 603 | static void
     | 604 | malloc_atfork_handler_child ()
     | 605 | {
     | 606 |   UNLOCK_ALIGNED_BLOCKS ();
     | 607 |   UNLOCK ();
     | 608 | }
     | 609 |
     | 610 | /* Set up mutexes and make malloc etc. thread-safe. */
     | 611 | void
     | 612 | malloc_enable_thread ()
     | 613 | {
     | 614 |   if (_malloc_thread_enabled_p)
     | 615 |     return;
     | 616 |
     | 617 |   /* Some pthread implementations call malloc for statically
     | 618 |      initialized mutexes when they are used first. To avoid such a
     | 619 |      situation, we initialize mutexes here while their use is
     | 620 |      disabled in malloc etc. */
     | 621 |   pthread_mutex_init (&_malloc_mutex, NULL);
     | 622 |   pthread_mutex_init (&_aligned_blocks_mutex, NULL);
     | 623 |   pthread_atfork (malloc_atfork_handler_prepare,
     | 624 |                   malloc_atfork_handler_parent,
     | 625 |                   malloc_atfork_handler_child);
     | 626 |   _malloc_thread_enabled_p = 1;
     | 627 | }
 566 | 628 | #endif
 567 | 629 |
 568 | 630 | static void
 569 | 631 | malloc_initialize_1 ()
 570 | 632 | {
 ... | ... |
 572 | 634 |   mcheck (NULL);
 573 | 635 | #endif
 574 | 636 |
 575 | 637 |   if (__malloc_initialize_hook)
 576 | 638 |     (*__malloc_initialize_hook) ();
 577 |     |
 578 |     |   /* We don't use recursive mutex because pthread_mutexattr_init may
 579 |     |      call malloc internally. */
 580 |     | #if 0 /* defined (USE_PTHREAD) */
 581 |     |   {
 582 |     |     pthread_mutexattr_t attr;
 583 |     |
 584 |     |     pthread_mutexattr_init (&attr);
 585 |     |     pthread_mutexattr_settype (&attr, PTHREAD_MUTEX_RECURSIVE);
 586 |     |     pthread_mutex_init (&_malloc_mutex, &attr);
 587 |     |     pthread_mutexattr_destroy (&attr);
 588 |     |   }
 589 |     | #endif
 590 | 639 |
 591 | 640 |   heapsize = HEAP / BLOCKSIZE;
 592 | 641 |   _heapinfo = (malloc_info *) align (heapsize * sizeof (malloc_info));
 593 | 642 |   if (_heapinfo == NULL)
 594 | 643 |     return;
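
The three atfork handlers added at new lines 589-608 follow the usual
pthread_atfork protocol: the prepare handler acquires both malloc mutexes so
that no other thread holds them at the instant the address space is copied,
and the parent and child handlers release them again on each side, so the
child's heap lock is usable even though the thread that held it does not
exist there. A stripped-down sketch of that protocol, independent of gmalloc
(the heap_lock mutex and handler names below are illustrative only):

    #include <pthread.h>
    #include <unistd.h>

    /* Illustrative stand-in for _malloc_mutex.  */
    static pthread_mutex_t heap_lock = PTHREAD_MUTEX_INITIALIZER;

    static void prepare (void) { pthread_mutex_lock (&heap_lock); }
    static void parent (void)  { pthread_mutex_unlock (&heap_lock); }
    static void child (void)   { pthread_mutex_unlock (&heap_lock); }

    int
    main (void)
    {
      /* Register once; the handlers then run around every fork, keeping
         heap_lock in a consistent state in both parent and child.  */
      pthread_atfork (prepare, parent, child);

      if (fork () == 0)
        _exit (0);   /* child: heap_lock was already released by child () */

      return 0;
    }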