Some cleanups and tweaks to some of the atomic.h files in preparation for
further changes and fixes in the future:
- Use aliases via macros rather than duplicated inlines wherever possible.
- Move all the aliases to the bottom of these files and the inline
  functions to the top.
- Add various comments.
- On alpha, drop atomic_{load_acq,store_rel}_{8,char,16,short}().
- On i386 and amd64, don't duplicate the extern declarations for functions
  in the two non-inline cases (KLD_MODULE, or a compiler that doesn't do
  inlines); instead, consolidate those two cases.
- Some whitespace fixes.

Approved by:	re (scottl)
commit 48281036d7 (parent 02295eedc7)
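The main pattern behind "use aliases via macros rather than duplicated inlines" can be seen in the first diff below (the alpha header, judging by its alpha_mb() calls): the old three-argument ATOMIC_ACQ_REL(NAME, WIDTH, TYPE) emitted a second, identical inline for the C-type name, while the new code emits only the width-named inline and turns the type-named entry point into a #define. A rough before/after sketch for one operation (illustrative only; the real generated bodies are in the hunks that follow):

/* Before: ATOMIC_ACQ_REL(set, 8, char) produced two identical inlines. */
static __inline void
atomic_set_acq_8(volatile u_int8_t *p, u_int8_t v)
{
	atomic_set_8(p, v);
	alpha_mb();
}

static __inline void
atomic_set_acq_char(volatile u_int8_t *p, u_int8_t v)
{
	atomic_set_8(p, v);
	alpha_mb();
}

/*
 * After: only the width-named inline remains; the type-named entry point
 * is a plain alias, collected with the others at the bottom of the file.
 */
#define	atomic_set_acq_char	atomic_set_acq_8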
@@ -27,7 +27,7 @@
 */

#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
@@ -216,29 +216,7 @@ static __inline u_int64_t atomic_readandclear_64(volatile u_int64_t *addr)
	return result;
}

#define atomic_set_char atomic_set_8
#define atomic_clear_char atomic_clear_8
#define atomic_add_char atomic_add_8
#define atomic_subtract_char atomic_subtract_8

#define atomic_set_short atomic_set_16
#define atomic_clear_short atomic_clear_16
#define atomic_add_short atomic_add_16
#define atomic_subtract_short atomic_subtract_16

#define atomic_set_int atomic_set_32
#define atomic_clear_int atomic_clear_32
#define atomic_add_int atomic_add_32
#define atomic_subtract_int atomic_subtract_32
#define atomic_readandclear_int atomic_readandclear_32

#define atomic_set_long atomic_set_64
#define atomic_clear_long atomic_clear_64
#define atomic_add_long atomic_add_64
#define atomic_subtract_long atomic_subtract_64
#define atomic_readandclear_long atomic_readandclear_64

#define ATOMIC_ACQ_REL(NAME, WIDTH, TYPE) \
#define ATOMIC_ACQ_REL(NAME, WIDTH) \
static __inline void \
atomic_##NAME##_acq_##WIDTH(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v)\
{ \
@@ -248,52 +226,39 @@ atomic_##NAME##_acq_##WIDTH(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v)\
\
static __inline void \
atomic_##NAME##_rel_##WIDTH(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v)\
{ \
	alpha_mb(); \
	atomic_##NAME##_##WIDTH(p, v); \
} \
\
static __inline void \
atomic_##NAME##_acq_##TYPE(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v)\
{ \
	atomic_##NAME##_##WIDTH(p, v); \
	alpha_mb(); \
} \
\
static __inline void \
atomic_##NAME##_rel_##TYPE(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v)\
{ \
	alpha_mb(); \
	atomic_##NAME##_##WIDTH(p, v); \
}

ATOMIC_ACQ_REL(set, 8, char)
ATOMIC_ACQ_REL(clear, 8, char)
ATOMIC_ACQ_REL(add, 8, char)
ATOMIC_ACQ_REL(subtract, 8, char)
ATOMIC_ACQ_REL(set, 16, short)
ATOMIC_ACQ_REL(clear, 16, short)
ATOMIC_ACQ_REL(add, 16, short)
ATOMIC_ACQ_REL(subtract, 16, short)
ATOMIC_ACQ_REL(set, 32, int)
ATOMIC_ACQ_REL(clear, 32, int)
ATOMIC_ACQ_REL(add, 32, int)
ATOMIC_ACQ_REL(subtract, 32, int)
ATOMIC_ACQ_REL(set, 64, long)
ATOMIC_ACQ_REL(clear, 64, long)
ATOMIC_ACQ_REL(add, 64, long)
ATOMIC_ACQ_REL(subtract, 64, long)
/* Variants of simple arithmetic with memory barriers. */
ATOMIC_ACQ_REL(set, 8)
ATOMIC_ACQ_REL(clear, 8)
ATOMIC_ACQ_REL(add, 8)
ATOMIC_ACQ_REL(subtract, 8)
ATOMIC_ACQ_REL(set, 16)
ATOMIC_ACQ_REL(clear, 16)
ATOMIC_ACQ_REL(add, 16)
ATOMIC_ACQ_REL(subtract, 16)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
ATOMIC_ACQ_REL(set, 64)
ATOMIC_ACQ_REL(clear, 64)
ATOMIC_ACQ_REL(add, 64)
ATOMIC_ACQ_REL(subtract, 64)

#undef ATOMIC_ACQ_REL
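For reference, a sketch of what one instantiation of the new two-argument macro expands to (reconstructed from the macro body in the hunk above; the acq body is cut off by the hunk boundary, but the removed TYPE variant shows the same ordering):

/* ATOMIC_ACQ_REL(add, 32) generates approximately: */
static __inline void
atomic_add_acq_32(volatile u_int32_t *p, u_int32_t v)
{
	atomic_add_32(p, v);	/* perform the operation ... */
	alpha_mb();		/* ... then the memory barrier */
}

static __inline void
atomic_add_rel_32(volatile u_int32_t *p, u_int32_t v)
{
	alpha_mb();		/* memory barrier first ... */
	atomic_add_32(p, v);	/* ... then the operation */
}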

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define ATOMIC_STORE_LOAD(TYPE, WIDTH) \
static __inline u_##TYPE \
atomic_load_acq_##WIDTH(volatile u_##TYPE *p) \
#define ATOMIC_STORE_LOAD(WIDTH) \
static __inline u_int##WIDTH##_t \
atomic_load_acq_##WIDTH(volatile u_int##WIDTH##_t *p) \
{ \
	u_##TYPE v; \
	u_int##WIDTH##_t v; \
\
	v = *p; \
	alpha_mb(); \
@@ -301,32 +266,14 @@ atomic_load_acq_##WIDTH(volatile u_##TYPE *p) \
} \
\
static __inline void \
atomic_store_rel_##WIDTH(volatile u_##TYPE *p, u_##TYPE v)\
{ \
	alpha_mb(); \
	*p = v; \
} \
static __inline u_##TYPE \
atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
{ \
	u_##TYPE v; \
\
	v = *p; \
	alpha_mb(); \
	return (v); \
} \
\
static __inline void \
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
atomic_store_rel_##WIDTH(volatile u_int##WIDTH##_t *p, u_int##WIDTH##_t v)\
{ \
	alpha_mb(); \
	*p = v; \
}

ATOMIC_STORE_LOAD(char, 8)
ATOMIC_STORE_LOAD(short, 16)
ATOMIC_STORE_LOAD(int, 32)
ATOMIC_STORE_LOAD(long, 64)
ATOMIC_STORE_LOAD(32)
ATOMIC_STORE_LOAD(64)

#undef ATOMIC_STORE_LOAD
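Likewise, a sketch of what ATOMIC_STORE_LOAD(32) expands to after this change, reconstructed from the macro body above (the return statement is visible only in the removed TYPE variant, but the kept variant is the same):

static __inline u_int32_t
atomic_load_acq_32(volatile u_int32_t *p)
{
	u_int32_t v;

	v = *p;			/* ordinary load, assumed atomic */
	alpha_mb();
	return (v);
}

static __inline void
atomic_store_rel_32(volatile u_int32_t *p, u_int32_t v)
{
	alpha_mb();
	*p = v;			/* ordinary store, assumed atomic */
}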

@@ -384,17 +331,6 @@ atomic_cmpset_64(volatile u_int64_t* p, u_int64_t cmpval, u_int64_t newval)
	return ret;
}

#define atomic_cmpset_int atomic_cmpset_32
#define atomic_cmpset_long atomic_cmpset_64

static __inline int
atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
{

	return (atomic_cmpset_long((volatile u_long *)dst, (u_long)exp,
	    (u_long)src));
}

static __inline u_int32_t
atomic_cmpset_acq_32(volatile u_int32_t *p, u_int32_t cmpval, u_int32_t newval)
{
@@ -429,10 +365,82 @@ atomic_cmpset_rel_64(volatile u_int64_t *p, u_int64_t cmpval, u_int64_t newval)
	return (atomic_cmpset_64(p, cmpval, newval));
}

/* Operations on chars. */
#define atomic_set_char atomic_set_8
#define atomic_set_acq_char atomic_set_acq_8
#define atomic_set_rel_char atomic_set_rel_8
#define atomic_clear_char atomic_clear_8
#define atomic_clear_acq_char atomic_clear_acq_8
#define atomic_clear_rel_char atomic_clear_rel_8
#define atomic_add_char atomic_add_8
#define atomic_add_acq_char atomic_add_acq_8
#define atomic_add_rel_char atomic_add_rel_8
#define atomic_subtract_char atomic_subtract_8
#define atomic_subtract_acq_char atomic_subtract_acq_8
#define atomic_subtract_rel_char atomic_subtract_rel_8

/* Operations on shorts. */
#define atomic_set_short atomic_set_16
#define atomic_set_acq_short atomic_set_acq_16
#define atomic_set_rel_short atomic_set_rel_16
#define atomic_clear_short atomic_clear_16
#define atomic_clear_acq_short atomic_clear_acq_16
#define atomic_clear_rel_short atomic_clear_rel_16
#define atomic_add_short atomic_add_16
#define atomic_add_acq_short atomic_add_acq_16
#define atomic_add_rel_short atomic_add_rel_16
#define atomic_subtract_short atomic_subtract_16
#define atomic_subtract_acq_short atomic_subtract_acq_16
#define atomic_subtract_rel_short atomic_subtract_rel_16

/* Operations on ints. */
#define atomic_set_int atomic_set_32
#define atomic_set_acq_int atomic_set_acq_32
#define atomic_set_rel_int atomic_set_rel_32
#define atomic_clear_int atomic_clear_32
#define atomic_clear_acq_int atomic_clear_acq_32
#define atomic_clear_rel_int atomic_clear_rel_32
#define atomic_add_int atomic_add_32
#define atomic_add_acq_int atomic_add_acq_32
#define atomic_add_rel_int atomic_add_rel_32
#define atomic_subtract_int atomic_subtract_32
#define atomic_subtract_acq_int atomic_subtract_acq_32
#define atomic_subtract_rel_int atomic_subtract_rel_32
#define atomic_cmpset_int atomic_cmpset_32
#define atomic_cmpset_acq_int atomic_cmpset_acq_32
#define atomic_cmpset_rel_int atomic_cmpset_rel_32
#define atomic_load_acq_int atomic_load_acq_32
#define atomic_store_rel_int atomic_store_rel_32
#define atomic_readandclear_int atomic_readandclear_32

/* Operations on longs. */
#define atomic_set_long atomic_set_64
#define atomic_set_acq_long atomic_set_acq_64
#define atomic_set_rel_long atomic_set_rel_64
#define atomic_clear_long atomic_clear_64
#define atomic_clear_acq_long atomic_clear_acq_64
#define atomic_clear_rel_long atomic_clear_rel_64
#define atomic_add_long atomic_add_64
#define atomic_add_acq_long atomic_add_acq_64
#define atomic_add_rel_long atomic_add_rel_64
#define atomic_subtract_long atomic_subtract_64
#define atomic_subtract_acq_long atomic_subtract_acq_64
#define atomic_subtract_rel_long atomic_subtract_rel_64
#define atomic_cmpset_long atomic_cmpset_64
#define atomic_cmpset_acq_long atomic_cmpset_acq_64
#define atomic_cmpset_rel_long atomic_cmpset_rel_64
#define atomic_load_acq_long atomic_load_acq_64
#define atomic_store_rel_long atomic_store_rel_64
#define atomic_readandclear_long atomic_readandclear_64

/* Operations on pointers. */
static __inline int
atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
{

	return (atomic_cmpset_long((volatile u_long *)dst, (u_long)exp,
	    (u_long)src));
}
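The pointer operations above are just casts onto the 64-bit (u_long) primitives. A small, hypothetical usage sketch (the slot/claim_slot names are illustrative, not part of the header):

static void *slot;

static int
claim_slot(void *p)
{

	/* Non-zero only for the caller that flips slot from NULL to p. */
	return (atomic_cmpset_ptr(&slot, NULL, p));
}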

static __inline int
atomic_cmpset_acq_ptr(volatile void *dst, void *exp, void *src)
@@ -462,7 +470,7 @@ atomic_store_rel_ptr(volatile void *p, void *v)
	atomic_store_rel_long((volatile u_long *)p, (u_long)v);
}

#define ATOMIC_PTR(NAME) \
#define ATOMIC_PTR(NAME) \
static __inline void \
atomic_##NAME##_ptr(volatile void *p, uintptr_t v) \
{ \

@@ -26,7 +26,7 @@
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
@@ -67,8 +67,8 @@
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE)
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
#if defined(KLD_MODULE) || !(defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE))
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
@@ -78,25 +78,23 @@ int atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src);
u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p); \
void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#else /* !KLD_MODULE */

#if defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE)
#else /* !KLD_MODULE && __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */

/*
 * For userland, assume the SMP case and use lock prefixes so that
 * the binaries will run on both types of systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define MPLOCKED lock ;
#define MPLOCKED lock ;
#else
#define MPLOCKED
#define MPLOCKED
#endif

/*
 * The assembly is volatilized to demark potential before-and-after side
 * effects if an interrupt or SMP collision were to occur.
 */
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
static __inline void \
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{ \
@@ -106,13 +104,6 @@ atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
} \
struct __hack

#else /* !(__GNUCLIKE_ASM && __CC_SUPPORTS___INLINE) */

#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#endif /* __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */

/*
 * Atomic compare and set, used by the mutex functions
 *
@@ -121,8 +112,6 @@ extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
 * Returns 0 on failure, non-zero on success
 */

#if defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE)

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
@@ -162,11 +151,8 @@ atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src)

	return (res);
}
#endif /* __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */
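The comment above states the cmpset contract: 0 on failure, non-zero on success. A minimal, hypothetical spin-acquire loop built only on primitives declared in this header (not code from the tree; a real lock would add backoff and debugging support):

static void
spin_acquire(volatile u_int *lockw)
{

	/* Loop until we are the one to change *lockw from 0 to 1. */
	while (atomic_cmpset_acq_int(lockw, 0, 1) == 0)
		;
}

static void
spin_release(volatile u_int *lockw)
{

	atomic_store_rel_int(lockw, 0);
}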

#if defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE)

#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
static __inline u_##TYPE \
atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
{ \
@@ -193,18 +179,7 @@ atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
} \
struct __hack

#else /* !(__GNUCLIKE_ASM && __CC_SUPPORTS___INLINE) */

extern int atomic_cmpset_int(volatile u_int *, u_int, u_int);
extern int atomic_cmpset_long(volatile u_long *, u_long, u_long);

#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
extern u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p); \
extern void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#endif /* __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */

#endif /* KLD_MODULE */
#endif /* KLD_MODULE || !(__GNUCLIKE_ASM && __CC_SUPPORTS___INLINE) */
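With the consolidation, a single ATOMIC_ASM definition now covers both "no inline" situations (KLD_MODULE, or a compiler without GNU-style asm and __inline support). A sketch of how the invocation shown below expands in the two branches (the asm body of the inline branch is elided here because the hunk cuts it off):

/* ATOMIC_ASM(set, char, "orb %b1,%0", "iq", v); expands to ... */

/* ... a bare prototype when the functions live in the kernel proper: */
void atomic_set_char(volatile u_char *p, u_char v);

/* ... or a static inline wrapping the (elided) MPLOCKED asm otherwise: */
static __inline void
atomic_set_char(volatile u_char *p, u_char v)
{
	/* __asm __volatile(MPLOCKED "orb %b1,%0" ...); -- body elided */
}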

ATOMIC_ASM(set, char, "orb %b1,%0", "iq", v);
ATOMIC_ASM(clear, char, "andb %b1,%0", "iq", ~v);
@@ -234,6 +209,49 @@ ATOMIC_STORE_LOAD(long, "cmpxchgq %0,%1", "xchgq %1,%0");
#undef ATOMIC_ASM
#undef ATOMIC_STORE_LOAD

#if !defined(WANT_FUNCTIONS)

/* Read the current value and store a zero in the destination. */
#if defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE)

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result;

	__asm __volatile (
	" xorl %0,%0 ; "
	" xchgl %1,%0 ; "
	"# atomic_readandclear_int"
	: "=&r" (result) /* 0 (result) */
	: "m" (*addr)); /* 1 (addr) */

	return (result);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result;

	__asm __volatile (
	" xorq %0,%0 ; "
	" xchgq %1,%0 ; "
	"# atomic_readandclear_long"
	: "=&r" (result) /* 0 (result) */
	: "m" (*addr)); /* 1 (addr) */

	return (result);
}

#else /* !(__GNUCLIKE_ASM && __CC_SUPPORTS___INLINE) */

u_int atomic_readandclear_int(volatile u_int *);
u_long atomic_readandclear_long(volatile u_long *);

#endif /* __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */
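A small, hypothetical example of the read-and-clear primitive defined above (pending_events/drain_events are illustrative names only):

static volatile u_int pending_events;

static u_int
drain_events(void)
{

	/* Atomically fetch the pending count and reset it to zero. */
	return (atomic_readandclear_int(&pending_events));
}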

/* Acquire and release variants are identical to the normal ones. */
#define atomic_set_acq_char atomic_set_char
#define atomic_set_rel_char atomic_set_char
#define atomic_clear_acq_char atomic_clear_char
@@ -260,8 +278,8 @@ ATOMIC_STORE_LOAD(long, "cmpxchgq %0,%1", "xchgq %1,%0");
#define atomic_add_rel_int atomic_add_int
#define atomic_subtract_acq_int atomic_subtract_int
#define atomic_subtract_rel_int atomic_subtract_int
#define atomic_cmpset_acq_int atomic_cmpset_int
#define atomic_cmpset_rel_int atomic_cmpset_int
#define atomic_cmpset_acq_int atomic_cmpset_int
#define atomic_cmpset_rel_int atomic_cmpset_int

#define atomic_set_acq_long atomic_set_long
#define atomic_set_rel_long atomic_set_long
@@ -271,10 +289,13 @@ ATOMIC_STORE_LOAD(long, "cmpxchgq %0,%1", "xchgq %1,%0");
#define atomic_add_rel_long atomic_add_long
#define atomic_subtract_acq_long atomic_subtract_long
#define atomic_subtract_rel_long atomic_subtract_long
#define atomic_cmpset_acq_long atomic_cmpset_long
#define atomic_cmpset_rel_long atomic_cmpset_long

#define atomic_cmpset_acq_ptr atomic_cmpset_ptr
#define atomic_cmpset_rel_ptr atomic_cmpset_ptr
#define atomic_cmpset_acq_ptr atomic_cmpset_ptr
#define atomic_cmpset_rel_ptr atomic_cmpset_ptr

/* Operations on 8-bit bytes. */
#define atomic_set_8 atomic_set_char
#define atomic_set_acq_8 atomic_set_acq_char
#define atomic_set_rel_8 atomic_set_rel_char
@@ -290,6 +311,7 @@ ATOMIC_STORE_LOAD(long, "cmpxchgq %0,%1", "xchgq %1,%0");
#define atomic_load_acq_8 atomic_load_acq_char
#define atomic_store_rel_8 atomic_store_rel_char

/* Operations on 16-bit words. */
#define atomic_set_16 atomic_set_short
#define atomic_set_acq_16 atomic_set_acq_short
#define atomic_set_rel_16 atomic_set_rel_short
@@ -305,6 +327,7 @@ ATOMIC_STORE_LOAD(long, "cmpxchgq %0,%1", "xchgq %1,%0");
#define atomic_load_acq_16 atomic_load_acq_short
#define atomic_store_rel_16 atomic_store_rel_short

/* Operations on 32-bit double words. */
#define atomic_set_32 atomic_set_int
#define atomic_set_acq_32 atomic_set_acq_int
#define atomic_set_rel_32 atomic_set_rel_int
@@ -324,7 +347,7 @@ ATOMIC_STORE_LOAD(long, "cmpxchgq %0,%1", "xchgq %1,%0");
#define atomic_cmpset_rel_32 atomic_cmpset_rel_int
#define atomic_readandclear_32 atomic_readandclear_int

#if !defined(WANT_FUNCTIONS)
/* Operations on pointers. */
static __inline int
atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
{
@@ -349,7 +372,7 @@ atomic_store_rel_ptr(volatile void *p, void *v)
	atomic_store_rel_long((volatile u_long *)p, (u_long)v);
}

#define ATOMIC_PTR(NAME) \
#define ATOMIC_PTR(NAME) \
static __inline void \
atomic_##NAME##_ptr(volatile void *p, uintptr_t v) \
{ \
@@ -375,44 +398,5 @@ ATOMIC_PTR(subtract)

#undef ATOMIC_PTR

#if defined(__GNUCLIKE_ASM) && defined(__CC_SUPPORTS___INLINE)

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result;

	__asm __volatile (
	" xorl %0,%0 ; "
	" xchgl %1,%0 ; "
	"# atomic_readandclear_int"
	: "=&r" (result) /* 0 (result) */
	: "m" (*addr)); /* 1 (addr) */

	return (result);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result;

	__asm __volatile (
	" xorq %0,%0 ; "
	" xchgq %1,%0 ; "
	"# atomic_readandclear_int"
	: "=&r" (result) /* 0 (result) */
	: "m" (*addr)); /* 1 (addr) */

	return (result);
}

#else /* !(__GNUCLIKE_ASM && __CC_SUPPORTS___INLINE) */

extern u_long atomic_readandclear_long(volatile u_long *);
extern u_int atomic_readandclear_int(volatile u_int *);

#endif /* __GNUCLIKE_ASM && __CC_SUPPORTS___INLINE */

#endif /* !defined(WANT_FUNCTIONS) */
#endif /* ! _MACHINE_ATOMIC_H_ */

@@ -26,7 +26,7 @@
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
@@ -67,8 +67,8 @@
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE)
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
@@ -77,25 +77,23 @@ int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p); \
void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#else /* !KLD_MODULE */

#ifdef __GNUCLIKE_ASM
#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, assume the SMP case and use lock prefixes so that
 * the binaries will run on both types of systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define MPLOCKED lock ;
#define MPLOCKED lock ;
#else
#define MPLOCKED
#define MPLOCKED
#endif

/*
 * The assembly is volatilized to demark potential before-and-after side
 * effects if an interrupt or SMP collision were to occur.
 */
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
static __inline void \
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{ \
@@ -105,13 +103,6 @@ atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
} \
struct __hack

#else /* !__GNUCLIKE_ASM */

#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#endif /* __GNUCLIKE_ASM */

/*
 * Atomic compare and set, used by the mutex functions
 *
@@ -120,8 +111,6 @@ extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
 * Returns 0 on failure, non-zero on success
 */

#ifdef __GNUCLIKE_ASM

#if defined(CPU_DISABLE_CMPXCHG)

static __inline int
@@ -172,10 +161,6 @@ atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)

#endif /* defined(CPU_DISABLE_CMPXCHG) */

#endif /* __GNUCLIKE_ASM */

#ifdef __GNUCLIKE_ASM

#if defined(_KERNEL) && !defined(SMP)

/*
@@ -185,7 +170,7 @@ atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
 * SMP kernels. For UP kernels, however, the cache of the single processor
 * is always consistent, so we don't need any memory barriers.
 */
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
static __inline u_##TYPE \
atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
{ \
@@ -201,7 +186,7 @@ struct __hack

#else /* defined(SMP) */

#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
static __inline u_##TYPE \
atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
{ \
@@ -230,17 +215,7 @@ struct __hack

#endif /* !defined(SMP) */

#else /* !__GNUCLIKE_ASM */

extern int atomic_cmpset_int(volatile u_int *, u_int, u_int);

#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
extern u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p); \
extern void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#endif /* __GNUCLIKE_ASM */

#endif /* KLD_MODULE */
#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set, char, "orb %b1,%0", "iq", v);
ATOMIC_ASM(clear, char, "andb %b1,%0", "iq", ~v);
@@ -270,6 +245,57 @@ ATOMIC_STORE_LOAD(long, "cmpxchgl %0,%1", "xchgl %1,%0");
#undef ATOMIC_ASM
#undef ATOMIC_STORE_LOAD

#if !defined(WANT_FUNCTIONS)

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)exp,
	    (u_int)src));
}

/* Read the current value and store a zero in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result;

	__asm __volatile (
	" xorl %0,%0 ; "
	" xchgl %1,%0 ; "
	"# atomic_readandclear_int"
	: "=&r" (result) /* 0 (result) */
	: "m" (*addr)); /* 1 (addr) */

	return (result);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result;

	__asm __volatile (
	" xorl %0,%0 ; "
	" xchgl %1,%0 ; "
	"# atomic_readandclear_long"
	: "=&r" (result) /* 0 (result) */
	: "m" (*addr)); /* 1 (addr) */

	return (result);
}

#else /* !__GNUCLIKE_ASM */

u_int atomic_readandclear_int(volatile u_int *);
u_long atomic_readandclear_long(volatile u_long *);

#endif /* __GNUCLIKE_ASM */

/* Acquire and release variants are identical to the normal ones. */
#define atomic_set_acq_char atomic_set_char
#define atomic_set_rel_char atomic_set_char
#define atomic_clear_acq_char atomic_clear_char
@@ -296,8 +322,8 @@ ATOMIC_STORE_LOAD(long, "cmpxchgl %0,%1", "xchgl %1,%0");
#define atomic_add_rel_int atomic_add_int
#define atomic_subtract_acq_int atomic_subtract_int
#define atomic_subtract_rel_int atomic_subtract_int
#define atomic_cmpset_acq_int atomic_cmpset_int
#define atomic_cmpset_rel_int atomic_cmpset_int
#define atomic_cmpset_acq_int atomic_cmpset_int
#define atomic_cmpset_rel_int atomic_cmpset_int

#define atomic_set_acq_long atomic_set_long
#define atomic_set_rel_long atomic_set_long
@@ -307,13 +333,13 @@ ATOMIC_STORE_LOAD(long, "cmpxchgl %0,%1", "xchgl %1,%0");
#define atomic_add_rel_long atomic_add_long
#define atomic_subtract_acq_long atomic_subtract_long
#define atomic_subtract_rel_long atomic_subtract_long
#define atomic_cmpset_long atomic_cmpset_int
#define atomic_cmpset_acq_long atomic_cmpset_acq_int
#define atomic_cmpset_rel_long atomic_cmpset_rel_int
#define atomic_cmpset_acq_long atomic_cmpset_long
#define atomic_cmpset_rel_long atomic_cmpset_long

#define atomic_cmpset_acq_ptr atomic_cmpset_ptr
#define atomic_cmpset_rel_ptr atomic_cmpset_ptr
#define atomic_cmpset_acq_ptr atomic_cmpset_ptr
#define atomic_cmpset_rel_ptr atomic_cmpset_ptr

/* Operations on 8-bit bytes. */
#define atomic_set_8 atomic_set_char
#define atomic_set_acq_8 atomic_set_acq_char
#define atomic_set_rel_8 atomic_set_rel_char
@@ -329,6 +355,7 @@ ATOMIC_STORE_LOAD(long, "cmpxchgl %0,%1", "xchgl %1,%0");
#define atomic_load_acq_8 atomic_load_acq_char
#define atomic_store_rel_8 atomic_store_rel_char

/* Operations on 16-bit words. */
#define atomic_set_16 atomic_set_short
#define atomic_set_acq_16 atomic_set_acq_short
#define atomic_set_rel_16 atomic_set_rel_short
@@ -344,6 +371,7 @@ ATOMIC_STORE_LOAD(long, "cmpxchgl %0,%1", "xchgl %1,%0");
#define atomic_load_acq_16 atomic_load_acq_short
#define atomic_store_rel_16 atomic_store_rel_short

/* Operations on 32-bit double words. */
#define atomic_set_32 atomic_set_int
#define atomic_set_acq_32 atomic_set_acq_int
#define atomic_set_rel_32 atomic_set_rel_int
@@ -363,7 +391,7 @@ ATOMIC_STORE_LOAD(long, "cmpxchgl %0,%1", "xchgl %1,%0");
#define atomic_cmpset_rel_32 atomic_cmpset_rel_int
#define atomic_readandclear_32 atomic_readandclear_int

#if !defined(WANT_FUNCTIONS)
/* Operations on pointers. */
static __inline int
atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
{
@@ -388,7 +416,7 @@ atomic_store_rel_ptr(volatile void *p, void *v)
	atomic_store_rel_int((volatile u_int *)p, (u_int)v);
}

#define ATOMIC_PTR(NAME) \
#define ATOMIC_PTR(NAME) \
static __inline void \
atomic_##NAME##_ptr(volatile void *p, uintptr_t v) \
{ \
@@ -414,44 +442,5 @@ ATOMIC_PTR(subtract)

#undef ATOMIC_PTR

#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result;

	__asm __volatile (
	" xorl %0,%0 ; "
	" xchgl %1,%0 ; "
	"# atomic_readandclear_int"
	: "=&r" (result) /* 0 (result) */
	: "m" (*addr)); /* 1 (addr) */

	return (result);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result;

	__asm __volatile (
	" xorl %0,%0 ; "
	" xchgl %1,%0 ; "
	"# atomic_readandclear_int"
	: "=&r" (result) /* 0 (result) */
	: "m" (*addr)); /* 1 (addr) */

	return (result);
}

#else /* !__GNUCLIKE_ASM */

extern u_long atomic_readandclear_long(volatile u_long *);
extern u_int atomic_readandclear_int(volatile u_int *);

#endif /* __GNUCLIKE_ASM */

#endif /* !defined(WANT_FUNCTIONS) */
#endif /* ! _MACHINE_ATOMIC_H_ */