Add atomic_fcmpset_*() inlines for powerpc

Summary:
atomic_fcmpset_*() is analogous to atomic_cmpset(), but saves off the read value
from the target memory location into the 'old' pointer in the case of failure.

Requested by:	 mjg
Differential Revision: https://reviews.freebsd.org/D9325
This commit is contained in:
Justin Hibbits 2017-01-30 02:15:54 +00:00
parent 9764ef21c4
commit 02f151d412
Notes: svn2git 2020-12-20 02:59:44 +00:00
svn path=/head/; revision=312973

View File

@@ -674,6 +674,129 @@ atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval)
#define atomic_cmpset_rel_ptr atomic_cmpset_rel_int
#endif
/*
 * Atomically compare the value stored at *p with *cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed and sets *cmpval to the read value from *p,
 * nonzero otherwise.
 */
static __inline int
atomic_fcmpset_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
int ret;
#ifdef __GNUCLIKE_ASM
/*
 * Operand map (from the constraint lists below):
 *   %0 = ret (also scratch for the loaded value), %1/%6 = *p (memory),
 *   %2 = *cmpval (memory), %3 = p, %4 = *cmpval (expected old value),
 *   %5 = newval, %7 = cmpval (pointer, for the failure write-back).
 */
__asm __volatile (
"1:\tlwarx %0, 0, %3\n\t" /* load old value */
"cmplw %4, %0\n\t" /* compare */
"bne 2f\n\t" /* exit if not equal */
"stwcx. %5, 0, %3\n\t" /* attempt to store */
"bne- 1b\n\t" /* spin if failed */
"li %0, 1\n\t" /* success - retval = 1 */
"b 3f\n\t" /* we've succeeded */
"2:\n\t"
/*
 * Failure path: the stwcx. stores the value just loaded back to *p,
 * which only succeeds (and is a no-op value-wise) if the reservation
 * is still held -- its purpose is to clear the reservation (74xx).
 * The stwx then writes the observed value through cmpval, per the
 * fcmpset contract.
 */
"stwcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
"stwx %0, 0, %7\n\t"
"li %0, 0\n\t" /* failure - retval = 0 */
"3:\n\t"
: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
: "cr0", "memory");
#endif
/* NOTE(review): if __GNUCLIKE_ASM is not defined, ret is returned
 * uninitialized -- pre-existing pattern in this file. */
return (ret);
}
/*
 * Register-width (u_long) version of atomic_fcmpset_int(): atomically
 * compare *p with *cmpval and store newval on match.  Returns zero on
 * failure (and writes the observed value to *cmpval), nonzero on
 * success.  Uses 64-bit ldarx/stdcx. on powerpc64 and 32-bit
 * lwarx/stwcx. otherwise.
 */
static __inline int
atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
int ret;
#ifdef __GNUCLIKE_ASM
/*
 * Operand map: %0 = ret/loaded value, %3 = p, %4 = *cmpval (expected),
 * %5 = newval, %7 = cmpval (pointer for failure write-back).
 */
__asm __volatile (
#ifdef __powerpc64__
"1:\tldarx %0, 0, %3\n\t" /* load old value */
"cmpld %4, %0\n\t" /* compare */
"bne 2f\n\t" /* exit if not equal */
"stdcx. %5, 0, %3\n\t" /* attempt to store */
#else
"1:\tlwarx %0, 0, %3\n\t" /* load old value */
"cmplw %4, %0\n\t" /* compare */
"bne 2f\n\t" /* exit if not equal */
"stwcx. %5, 0, %3\n\t" /* attempt to store */
#endif
"bne- 1b\n\t" /* spin if failed */
"li %0, 1\n\t" /* success - retval = 1 */
"b 3f\n\t" /* we've succeeded */
"2:\n\t"
/*
 * Failure path: conditional store of the just-read value clears any
 * outstanding reservation (74xx); the plain store then publishes the
 * observed value through cmpval, per the fcmpset contract.
 */
#ifdef __powerpc64__
"stdcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
"stdx %0, 0, %7\n\t"
#else
"stwcx. %0, 0, %3\n\t" /* clear reservation (74xx) */
"stwx %0, 0, %7\n\t"
#endif
"li %0, 0\n\t" /* failure - retval = 0 */
"3:\n\t"
: "=&r" (ret), "=m" (*p), "=m" (*cmpval)
: "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval)
: "cr0", "memory");
#endif
return (ret);
}
/*
 * Acquire variant of atomic_fcmpset_int(): the compare-and-set is
 * followed by an acquire barrier.  Return value and *cmpval semantics
 * are identical to atomic_fcmpset_int().
 */
static __inline int
atomic_fcmpset_acq_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
	int ret;

	ret = atomic_fcmpset_int(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (ret);
}
/*
 * Release variant of atomic_fcmpset_int(): a release barrier is issued
 * before the compare-and-set.
 */
static __inline int
atomic_fcmpset_rel_int(volatile u_int *p, u_int *cmpval, u_int newval)
{
	int ret;

	__ATOMIC_REL();
	ret = atomic_fcmpset_int(p, cmpval, newval);
	return (ret);
}
/*
 * Acquire variant of atomic_fcmpset_long(): the compare-and-set is
 * followed by an acquire barrier.  Returns nonzero on success, zero on
 * failure (with *cmpval updated to the observed value).
 *
 * Fix: the local was declared u_long although atomic_fcmpset_long()
 * returns int and this function returns int; use int, matching the
 * atomic_fcmpset_acq_int() sibling and avoiding a pointless
 * widen-then-narrow of the result.
 */
static __inline int
atomic_fcmpset_acq_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
	int retval;

	retval = atomic_fcmpset_long(p, cmpval, newval);
	__ATOMIC_ACQ();
	return (retval);
}
/*
 * Release variant of atomic_fcmpset_long(): a release barrier precedes
 * the compare-and-set.
 */
static __inline int
atomic_fcmpset_rel_long(volatile u_long *p, u_long *cmpval, u_long newval)
{
	int ret;

	__ATOMIC_REL();
	ret = atomic_fcmpset_long(p, cmpval, newval);
	return (ret);
}
/* Fixed-width 32-bit aliases map to the native u_int implementations. */
#define atomic_fcmpset_32 atomic_fcmpset_int
#define atomic_fcmpset_acq_32 atomic_fcmpset_acq_int
#define atomic_fcmpset_rel_32 atomic_fcmpset_rel_int
#ifdef __powerpc64__
/* On powerpc64, u_long is 64 bits: 64-bit and pointer-sized aliases
 * both map to the u_long implementations. */
#define atomic_fcmpset_64 atomic_fcmpset_long
#define atomic_fcmpset_acq_64 atomic_fcmpset_acq_long
#define atomic_fcmpset_rel_64 atomic_fcmpset_rel_long
#define atomic_fcmpset_ptr atomic_fcmpset_long
#define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_long
#define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_long
#else
/* On 32-bit powerpc, pointers are 32 bits: pointer-sized aliases map
 * to the u_int implementations (no 64-bit variants provided). */
#define atomic_fcmpset_ptr atomic_fcmpset_int
#define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_int
#define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_int
#endif
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{