Beautify. This has the side effect of improving portability and making
lint work cleaner.

Inspired to do this by:	jhb
Mark Murray 2002-07-18 15:56:46 +00:00
parent f6e34b823a
commit 4c5aee92a7
Notes: svn2git 2020-12-20 02:59:44 +00:00
svn path=/head/; revision=100327
2 changed files with 62 additions and 78 deletions

View File

@@ -28,12 +28,6 @@
#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_
#ifndef __GNUC__
#ifndef lint
#error "This file must be compiled with GCC or lint"
#endif /* lint */
#endif /* __GNUC__ */
/*
* Various simple arithmetic on memory which is atomic in the presence
* of interrupts and multiple processors.
@@ -81,6 +75,8 @@ void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
#else /* !KLD_MODULE */
#ifdef __GNUC__
/*
* For userland, assume the SMP case and use lock prefixes so that
* the binaries will run on both types of systems.
@@ -95,7 +91,6 @@ void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
* The assembly is volatilized to demark potential before-and-after side
* effects if an interrupt or SMP collision were to occur.
*/
#ifdef __GNUC__
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
static __inline void \
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
@@ -104,9 +99,12 @@ atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
: "+m" (*p) \
: CONS (V)); \
}
#else /* !__GNUC__ */
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
#endif /* __GNUC__ */
/*
@@ -118,7 +116,9 @@ void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
*/
#if defined(__GNUC__)
#if defined(I386_CPU)
static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
@@ -142,7 +142,9 @@ atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
return (res);
}
#else /* defined(I386_CPU) */
static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
@@ -162,17 +164,15 @@ atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
return (res);
}
#endif /* defined(I386_CPU) */
#else /* !defined(__GNUC__) */
static __inline int
atomic_cmpset_int(volatile u_int *dst __unused, u_int exp __unused,
u_int src __unused)
{
}
#endif /* defined(__GNUC__) */
#if defined(__GNUC__)
#if defined(I386_CPU)
/*
* We assume that a = b will do atomic loads and stores.
*
@@ -192,6 +192,7 @@ atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
*p = v; \
__asm __volatile("" : : : "memory"); \
}
#else /* !defined(I386_CPU) */
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
@@ -219,18 +220,19 @@ atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
"+r" (v) /* 1 */ \
: : "memory"); \
}
#endif /* defined(I386_CPU) */
#else /* !defined(__GNUC__) */
/*
* XXXX: Dummy functions!!
*/
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p __unused); \
void atomic_store_rel_##TYPE(volatile u_##TYPE *p __unused, \
u_##TYPE v __unused)
extern int atomic_cmpset_int(volatile u_int *, u_int, u_int);
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
extern u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p); \
extern void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
#endif /* defined(__GNUC__) */
#endif /* KLD_MODULE */
ATOMIC_ASM(set, char, "orb %b1,%0", "iq", v);
@@ -402,6 +404,7 @@ ATOMIC_PTR(subtract)
#undef ATOMIC_PTR
#if defined(__GNUC__)
static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
@@ -416,17 +419,7 @@ atomic_readandclear_int(volatile u_int *addr)
return (result);
}
#else /* !defined(__GNUC__) */
/*
* XXXX: Dummy!
*/
static __inline u_int
atomic_readandclear_int(volatile u_int *addr __unused)
{
}
#endif /* defined(__GNUC__) */
#if defined(__GNUC__)
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
@@ -441,14 +434,13 @@ atomic_readandclear_long(volatile u_long *addr)
return (result);
}
#else /* !defined(__GNUC__) */
/*
* XXXX: Dummy!
*/
static __inline u_long
atomic_readandclear_long(volatile u_long *addr __unused)
{
}
extern u_long atomic_readandclear_long(volatile u_long *);
extern u_int atomic_readandclear_int(volatile u_int *);
#endif /* defined(__GNUC__) */
#endif /* !defined(WANT_FUNCTIONS) */
#endif /* ! _MACHINE_ATOMIC_H_ */

View File

@@ -28,12 +28,6 @@
#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_
#ifndef __GNUC__
#ifndef lint
#error "This file must be compiled with GCC or lint"
#endif /* lint */
#endif /* __GNUC__ */
/*
* Various simple arithmetic on memory which is atomic in the presence
* of interrupts and multiple processors.
@@ -81,6 +75,8 @@ void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
#else /* !KLD_MODULE */
#ifdef __GNUC__
/*
* For userland, assume the SMP case and use lock prefixes so that
* the binaries will run on both types of systems.
@@ -95,7 +91,6 @@ void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
* The assembly is volatilized to demark potential before-and-after side
* effects if an interrupt or SMP collision were to occur.
*/
#ifdef __GNUC__
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
static __inline void \
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
@@ -104,9 +99,12 @@ atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
: "+m" (*p) \
: CONS (V)); \
}
#else /* !__GNUC__ */
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \
extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
#endif /* __GNUC__ */
/*
@@ -118,7 +116,9 @@ void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
*/
#if defined(__GNUC__)
#if defined(I386_CPU)
static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
@@ -142,7 +142,9 @@ atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
return (res);
}
#else /* defined(I386_CPU) */
static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
@@ -162,17 +164,15 @@ atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
return (res);
}
#endif /* defined(I386_CPU) */
#else /* !defined(__GNUC__) */
static __inline int
atomic_cmpset_int(volatile u_int *dst __unused, u_int exp __unused,
u_int src __unused)
{
}
#endif /* defined(__GNUC__) */
#if defined(__GNUC__)
#if defined(I386_CPU)
/*
* We assume that a = b will do atomic loads and stores.
*
@@ -192,6 +192,7 @@ atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
*p = v; \
__asm __volatile("" : : : "memory"); \
}
#else /* !defined(I386_CPU) */
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
@@ -219,18 +220,19 @@ atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
"+r" (v) /* 1 */ \
: : "memory"); \
}
#endif /* defined(I386_CPU) */
#else /* !defined(__GNUC__) */
/*
* XXXX: Dummy functions!!
*/
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p __unused); \
void atomic_store_rel_##TYPE(volatile u_##TYPE *p __unused, \
u_##TYPE v __unused)
extern int atomic_cmpset_int(volatile u_int *, u_int, u_int);
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \
extern u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p); \
extern void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
#endif /* defined(__GNUC__) */
#endif /* KLD_MODULE */
ATOMIC_ASM(set, char, "orb %b1,%0", "iq", v);
@@ -402,6 +404,7 @@ ATOMIC_PTR(subtract)
#undef ATOMIC_PTR
#if defined(__GNUC__)
static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
@@ -416,17 +419,7 @@ atomic_readandclear_int(volatile u_int *addr)
return (result);
}
#else /* !defined(__GNUC__) */
/*
* XXXX: Dummy!
*/
static __inline u_int
atomic_readandclear_int(volatile u_int *addr __unused)
{
}
#endif /* defined(__GNUC__) */
#if defined(__GNUC__)
static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
@@ -441,14 +434,13 @@ atomic_readandclear_long(volatile u_long *addr)
return (result);
}
#else /* !defined(__GNUC__) */
/*
* XXXX: Dummy!
*/
static __inline u_long
atomic_readandclear_long(volatile u_long *addr __unused)
{
}
extern u_long atomic_readandclear_long(volatile u_long *);
extern u_int atomic_readandclear_int(volatile u_int *);
#endif /* defined(__GNUC__) */
#endif /* !defined(WANT_FUNCTIONS) */
#endif /* ! _MACHINE_ATOMIC_H_ */
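
For context, both file diffs above apply the same pattern: the lint-only #error
guard and the empty __unused dummy bodies that stood in for the GCC inline-asm
versions are dropped, and the non-GCC side now supplies plain extern prototypes
instead. A minimal, self-contained sketch of that pattern (i386/amd64 with GCC;
atomic_add_int_sketch() is a hypothetical name, not the real header's macro)
might look like this:

#include <sys/types.h>		/* u_int on BSD; plain "unsigned int" also works */

#ifdef __GNUC__
/*
 * GCC path: a static inline wrapper around the locked x86 instruction,
 * mirroring the shape of the ATOMIC_ASM() macro in the header above.
 */
static __inline void
atomic_add_int_sketch(volatile u_int *p, u_int v)
{
	__asm __volatile("lock; addl %1,%0"
	    : "+m" (*p)		/* operand 0: memory, read and written */
	    : "ir" (v));	/* operand 1: immediate or register source */
}
#else /* !__GNUC__ */
/*
 * lint / non-GCC path: declare the function rather than faking an empty
 * body, so the checker sees a correct prototype and no dead stub.
 */
extern void atomic_add_int_sketch(volatile u_int *p, u_int v);
#endif /* __GNUC__ */

With GCC, a call such as atomic_add_int_sketch(&counter, 1) compiles down to a
single lock addl; under lint only the prototype is checked, which is what makes
the lint run cleaner.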