Unbreak ia64: tls_model("initial-exec") is invalid, because it assumes
the static TLS model, which is fundamentally different from the dynamic
TLS model. The consequence was data corruption. Limit the attribute to
i386 and amd64.
marcel 2010-02-16 06:47:00 +00:00
parent b72bfb8c31
commit b3a749c0d1

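The pattern the diff below introduces can be illustrated outside the tree. The following is a minimal sketch, not the FreeBSD malloc.c source: the variable example_tls and the main() driver are hypothetical, and GCC-style attributes are assumed. The idea is to confine the initial-exec attribute to architectures known to use the static TLS model (i386 and amd64) and let every other architecture, ia64 included, fall back to the compiler's default TLS model.

/*
 * Minimal sketch (assumed names, not from malloc.c): TLS_MODEL expands to
 * the initial-exec attribute only where the static TLS model applies, and
 * to nothing elsewhere.
 */
#include <stdio.h>

#if defined(__i386__) || defined(__amd64__)
#  define TLS_MODEL	__attribute__((tls_model("initial-exec")))
#else
#  define TLS_MODEL	/* default TLS model */
#endif

/* Hypothetical thread-local variable, declared the same way as arenas_map. */
static __thread int	example_tls TLS_MODEL;

int
main(void)
{

	example_tls = 42;
	printf("example_tls = %d\n", example_tls);
	return (0);
}

Keeping the per-architecture decision in one macro lets the declarations themselves (arenas_map, tcache_tls, mmap_unaligned in the diff) stay identical across architectures instead of repeating the attribute or wrapping it in conditionals.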

@@ -215,10 +215,12 @@ __FBSDID("$FreeBSD$");
 # define LG_QUANTUM 4
 # define LG_SIZEOF_PTR 2
 # define CPU_SPINWAIT __asm__ volatile("pause")
+# define TLS_MODEL __attribute__((tls_model("initial-exec")))
 #endif
 #ifdef __ia64__
 # define LG_QUANTUM 4
 # define LG_SIZEOF_PTR 3
+# define TLS_MODEL /* default */
 #endif
 #ifdef __alpha__
 # define LG_QUANTUM 4
@@ -234,6 +236,7 @@ __FBSDID("$FreeBSD$");
 # define LG_QUANTUM 4
 # define LG_SIZEOF_PTR 3
 # define CPU_SPINWAIT __asm__ volatile("pause")
+# define TLS_MODEL __attribute__((tls_model("initial-exec")))
 #endif
 #ifdef __arm__
 # define LG_QUANTUM 3
@@ -1090,14 +1093,12 @@ static pthread_mutex_t arenas_lock; /* Protects arenas initialization. */
  * Map of _pthread_self() --> arenas[???], used for selecting an arena to use
  * for allocations.
  */
-static __thread arena_t *arenas_map
-    __attribute__((tls_model("initial-exec")));
+static __thread arena_t *arenas_map TLS_MODEL;
 #endif
 #ifdef MALLOC_TCACHE
 /* Map of thread-specific caches. */
-static __thread tcache_t *tcache_tls
-    __attribute__((tls_model("initial-exec")));
+static __thread tcache_t *tcache_tls TLS_MODEL;

 /*
  * Number of cache slots for each bin in the thread cache, or 0 if tcache is
@@ -1115,15 +1116,12 @@ unsigned tcache_gc_incr;
  * since the state of mmap_unaligned only affects performance, rather than
  * correct function.
  */
-static
 #ifndef NO_TLS
-    __thread
+static __thread bool mmap_unaligned TLS_MODEL;
+#else
+static bool mmap_unaligned;
 #endif
-    bool mmap_unaligned
-#ifndef NO_TLS
-    __attribute__((tls_model("initial-exec")))
-#endif
-    ;
 #ifdef MALLOC_STATS
 static malloc_mutex_t chunks_mtx;
 /* Chunk statistics. */