Implement atomic_add_64() and atomic_subtract_64() for the i386 target.

While at it add missing _acq_ and _rel_ variants for 64-bit atomic
operations under i386.

Reviewed by:	kib@
MFC after:	1 week
Sponsored by:	Mellanox Technologies
This commit is contained in:
hselasky 2018-05-29 11:59:02 +00:00
parent 5f3cc62a31
commit 831d81975f
3 changed files with 42 additions and 3 deletions

View File

@ -52,7 +52,8 @@ atomic_init(void)
}
#endif
#if !defined(__LP64__) && !defined(__mips_n32) && !defined(ARM_HAVE_ATOMIC64)
#if !defined(__LP64__) && !defined(__mips_n32) && \
!defined(ARM_HAVE_ATOMIC64) && !defined(__i386__)
void
atomic_add_64(volatile uint64_t *target, int64_t delta)
{

View File

@ -36,7 +36,8 @@
atomic_cmpset_ptr((volatile uintptr_t *)(_a), (uintptr_t)(_b), (uintptr_t) (_c))
#define cas32 atomic_cmpset_32
#if !defined(__LP64__) && !defined(__mips_n32) && !defined(ARM_HAVE_ATOMIC64)
#if !defined(__LP64__) && !defined(__mips_n32) && \
!defined(ARM_HAVE_ATOMIC64) && !defined(__i386__)
extern void atomic_add_64(volatile uint64_t *target, int64_t delta);
extern void atomic_dec_64(volatile uint64_t *target);
#endif
@ -85,7 +86,8 @@ atomic_dec_32_nv(volatile uint32_t *target)
return (atomic_fetchadd_32(target, -1) - 1);
}
#if defined(__LP64__) || defined(__mips_n32) || defined(ARM_HAVE_ATOMIC64)
#if defined(__LP64__) || defined(__mips_n32) || \
defined(ARM_HAVE_ATOMIC64) || defined(__i386__)
static __inline void
atomic_dec_64(volatile uint64_t *target)
{

View File

@ -134,6 +134,8 @@ uint64_t atomic_load_acq_64(volatile uint64_t *);
void atomic_store_rel_64(volatile uint64_t *, uint64_t);
uint64_t atomic_swap_64(volatile uint64_t *, uint64_t);
uint64_t atomic_fetchadd_64(volatile uint64_t *, uint64_t);
void atomic_add_64(volatile uint64_t *, uint64_t);
void atomic_subtract_64(volatile uint64_t *, uint64_t);
#else /* !KLD_MODULE && __GNUCLIKE_ASM */
@ -581,6 +583,30 @@ atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
}
}
/*
 * Atomically add 'v' to the 64-bit value at '*p'.
 *
 * Compare-and-swap loop fallback: a torn (non-atomic) read of the old
 * value is harmless because atomic_cmpset_64() only commits the update
 * when '*p' still matches the value we read, retrying otherwise.
 */
static __inline void
atomic_add_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t old;

	do {
		old = *p;
	} while (!atomic_cmpset_64(p, old, old + v));
}
/*
 * Atomically subtract 'v' from the 64-bit value at '*p'.
 *
 * Same compare-and-swap retry scheme as atomic_add_64(): the loop only
 * commits when the observed old value is still current.
 */
static __inline void
atomic_subtract_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t old;

	do {
		old = *p;
	} while (!atomic_cmpset_64(p, old, old - v));
}
#endif /* _KERNEL */
#endif /* KLD_MODULE || !__GNUCLIKE_ASM */
@ -805,6 +831,16 @@ u_long atomic_swap_long(volatile u_long *p, u_long v);
#define atomic_testandset_32 atomic_testandset_int
#define atomic_testandclear_32 atomic_testandclear_int
/* Operations on 64-bit quad words. */
/*
 * The _acq_/_rel_ variants alias the plain operations: on i386 the
 * 64-bit primitives are implemented with locked instructions, which
 * (per the x86 memory model) already act as full barriers, so no extra
 * ordering work is needed — NOTE(review): matches the existing 32-bit
 * aliasing convention in this file; confirm against atomic(9).
 */
#define atomic_cmpset_acq_64 atomic_cmpset_64
#define atomic_cmpset_rel_64 atomic_cmpset_64
#define atomic_fetchadd_acq_64 atomic_fetchadd_64
#define atomic_fetchadd_rel_64 atomic_fetchadd_64
#define atomic_add_acq_64 atomic_add_64
#define atomic_add_rel_64 atomic_add_64
#define atomic_subtract_acq_64 atomic_subtract_64
#define atomic_subtract_rel_64 atomic_subtract_64
/* Operations on pointers. */
#define atomic_set_ptr(p, v) \
atomic_set_int((volatile u_int *)(p), (u_int)(v))