From 25a1e0f636007140668fc68918e7238a07142573 Mon Sep 17 00:00:00 2001
From: Hans Petter Selasky <hselasky@FreeBSD.org>
Date: Thu, 9 Aug 2018 11:30:13 +0000
Subject: [PATCH] Implement missing atomic_fcmpset_XXX() support for i386.

This also fixes i386 build after r337527.

MFC after:	1 week
Sponsored by:	Mellanox Technologies
---
 sys/i386/include/atomic.h | 51 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 51 insertions(+)

diff --git a/sys/i386/include/atomic.h b/sys/i386/include/atomic.h
index bda2a620af00..0d673af73581 100644
--- a/sys/i386/include/atomic.h
+++ b/sys/i386/include/atomic.h
@@ -130,6 +130,7 @@ u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
 void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
 
 int		atomic_cmpset_64(volatile uint64_t *, uint64_t, uint64_t);
+int		atomic_fcmpset_64(volatile uint64_t *, uint64_t *, uint64_t);
 uint64_t	atomic_load_acq_64(volatile uint64_t *);
 void		atomic_store_rel_64(volatile uint64_t *, uint64_t);
 uint64_t	atomic_swap_64(volatile uint64_t *, uint64_t);
@@ -404,6 +405,18 @@ atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
 	return (res);
 }
 
+static __inline int
+atomic_fcmpset_64_i386(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
+{
+
+	if (atomic_cmpset_64_i386(dst, *expect, src)) {
+		return (1);
+	} else {
+		*expect = *dst;
+		return (0);
+	}
+}
+
 static __inline uint64_t
 atomic_load_acq_64_i386(volatile uint64_t *p)
 {
@@ -483,6 +496,24 @@ atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
 	return (res);
 }
 
+static __inline int
+atomic_fcmpset_64_i586(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
+{
+	u_char res;
+
+	__asm __volatile(
+	"	" MPLOCKED "		"
+	"	cmpxchg8b %1 ;		"
+	"	sete	%0"
+	: "=q" (res),			/* 0 */
+	  "+m" (*dst),			/* 1 */
+	  "+A" (*expect)		/* 2 */
+	: "b" ((uint32_t)src),		/* 3 */
+	  "c" ((uint32_t)(src >> 32))	/* 4 */
+	: "memory", "cc");
+	return (res);
+}
+
 static __inline uint64_t
 atomic_load_acq_64_i586(volatile uint64_t *p)
 {
@@ -542,6 +573,16 @@ atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
 	return (atomic_cmpset_64_i586(dst, expect, src));
 }
 
+static __inline int
+atomic_fcmpset_64(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
+{
+
+	if ((cpu_feature & CPUID_CX8) == 0)
+		return (atomic_fcmpset_64_i386(dst, expect, src));
+	else
+		return (atomic_fcmpset_64_i586(dst, expect, src));
+}
+
 static __inline uint64_t
 atomic_load_acq_64(volatile uint64_t *p)
 {
@@ -655,6 +696,14 @@ atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
 	    (u_int)src));
 }
 
+static __inline int
+atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src)
+{
+
+	return (atomic_fcmpset_int((volatile u_int *)dst, (u_int *)expect,
+	    (u_int)src));
+}
+
 static __inline u_long
 atomic_fetchadd_long(volatile u_long *p, u_long v)
 {
@@ -834,6 +883,8 @@ u_long	atomic_swap_long(volatile u_long *p, u_long v);
 /* Operations on 64-bit quad words. */
 #define	atomic_cmpset_acq_64	atomic_cmpset_64
 #define	atomic_cmpset_rel_64	atomic_cmpset_64
+#define	atomic_fcmpset_acq_64	atomic_fcmpset_64
+#define	atomic_fcmpset_rel_64	atomic_fcmpset_64
 #define	atomic_fetchadd_acq_64	atomic_fetchadd_64
 #define	atomic_fetchadd_rel_64	atomic_fetchadd_64
 #define	atomic_add_acq_64	atomic_add_64
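
Note (not part of the patch): unlike atomic_cmpset_64(), the new
atomic_fcmpset_64() writes the value currently in memory back into
*expect when the comparison fails, as seen in the i386 fallback above,
so callers can retry without a separate reload of the target.  Below is
a minimal usage sketch of that retry-loop idiom; the counter_inc64()
helper is hypothetical and only illustrates how the new primitive
would typically be called.

/*
 * Hypothetical example, not from atomic.h: bump a 64-bit counter with
 * the new atomic_fcmpset_64().  On failure the primitive refreshes
 * "exp" from memory, so no extra load is needed between retries.
 */
static __inline void
counter_inc64(volatile uint64_t *cnt)
{
	uint64_t exp;

	exp = *cnt;			/* initial snapshot */
	while (atomic_fcmpset_64(cnt, &exp, exp + 1) == 0)
		;			/* "exp" was updated to the fresh value */
}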