Add a unified macro that prevents the compiler from reordering
instruction loads/stores at will.

The macro __compiler_membar() is currently supported for both gcc and
clang; kernel compilation with any other compiler will fail.

Reviewed by:	bde, kib
Discussed with:	dim, theraven
MFC after:	2 weeks
This commit is contained in:
Attilio Rao 2012-10-09 14:32:30 +00:00
parent 6a762eb23e
commit 3a4730256a
Notes: svn2git 2020-12-20 02:59:44 +00:00
svn path=/head/; revision=241374
10 changed files with 20 additions and 21 deletions

View File

@ -226,7 +226,7 @@ atomic_fetchadd_long(volatile u_long *p, u_long v)
static __inline void \
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{ \
__asm __volatile("" : : : "memory"); \
__compiler_membar(); \
*p = v; \
} \
struct __hack
@ -240,7 +240,7 @@ atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
u_##TYPE tmp; \
\
tmp = *p; \
__asm __volatile("" : : : "memory"); \
__compiler_membar(); \
return (tmp); \
} \
struct __hack

View File

@ -129,10 +129,6 @@ typedef dev_t os_dev_t;
#define copy_from_user(dst, src, len) copyin((src), (dst), (len))
#endif
#ifndef barrier
#define barrier() __asm__ __volatile__("": : :"memory")
#endif
/*
* Map simple global variables to FreeBSD kernel equivalents
*/

View File

@ -162,7 +162,7 @@
*/
#define EFSCORRUPTED 990 /* Filesystem is corrupted */
#define SYNCHRONIZE() barrier()
#define SYNCHRONIZE() __compiler_membar()
#define __return_address __builtin_return_address(0)
/*

View File

@ -296,7 +296,7 @@ atomic_fetchadd_int(volatile u_int *p, u_int v)
static __inline void \
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{ \
__asm __volatile("" : : : "memory"); \
__compiler_membar(); \
*p = v; \
} \
struct __hack
@ -310,7 +310,7 @@ atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
u_##TYPE tmp; \
\
tmp = *p; \
__asm __volatile("" : : : "memory"); \
__compiler_membar(); \
return (tmp); \
} \
struct __hack

View File

@ -65,10 +65,6 @@ __FBSDID("$FreeBSD$");
* does not seem very useful
*/
static __inline void compiler_memory_barrier(void) {
__asm __volatile("":::"memory");
}
static void assert_rm(const struct lock_object *lock, int what);
static void lock_rm(struct lock_object *lock, int how);
#ifdef KDTRACE_HOOKS
@ -353,7 +349,7 @@ _rm_rlock(struct rmlock *rm, struct rm_priotracker *tracker, int trylock)
td->td_critnest++; /* critical_enter(); */
compiler_memory_barrier();
__compiler_membar();
pc = cpuid_to_pcpu[td->td_oncpu]; /* pcpu_find(td->td_oncpu); */
@ -361,7 +357,7 @@ _rm_rlock(struct rmlock *rm, struct rm_priotracker *tracker, int trylock)
sched_pin();
compiler_memory_barrier();
__compiler_membar();
td->td_critnest--;

View File

@ -70,7 +70,7 @@ static __inline void
mips_barrier(void)
{
#if defined(CPU_CNMIPS) || defined(CPU_RMI) || defined(CPU_NLM)
__asm __volatile("" : : : "memory");
__compiler_membar();
#else
__asm __volatile (".set noreorder\n\t"
"nop\n\t"

View File

@ -593,7 +593,7 @@ bus_space_barrier(bus_space_tag_t tag, bus_space_handle_t bsh,
if (flags & BUS_SPACE_BARRIER_READ)
__asm __volatile("lock; addl $0,0(%%esp)" : : : "memory");
else
__asm __volatile("" : : : "memory");
__compiler_membar();
}
#ifdef BUS_SPACE_NO_LEGACY

View File

@ -97,7 +97,7 @@
#define atomic_cas_acq(p, e, s, sz) ({ \
itype(sz) v; \
v = atomic_cas((p), (e), (s), sz); \
__asm __volatile("" : : : "memory"); \
__compiler_membar(); \
v; \
})
@ -122,7 +122,7 @@
#define atomic_op_acq(p, op, v, sz) ({ \
itype(sz) t; \
t = atomic_op((p), op, (v), sz); \
__asm __volatile("" : : : "memory"); \
__compiler_membar(); \
t; \
})
@ -139,7 +139,7 @@
#define atomic_load_acq(p, sz) ({ \
itype(sz) v; \
v = atomic_load((p), sz); \
__asm __volatile("" : : : "memory"); \
__compiler_membar(); \
v; \
})

View File

@ -82,6 +82,13 @@
# define __GNUC_VA_LIST_COMPATIBILITY 1
#endif
/*
* Compiler memory barriers, specific to gcc and clang.
*/
#if defined(__GNUC__)
#define __compiler_membar() __asm __volatile(" " : : : "memory")
#endif
#ifndef __INTEL_COMPILER
# define __GNUCLIKE_BUILTIN_NEXT_ARG 1
# define __GNUCLIKE_MATH_BUILTIN_RELOPS

View File

@ -1014,7 +1014,7 @@ bus_space_barrier(bus_space_tag_t tag __unused, bus_space_handle_t bsh __unused,
__asm __volatile("lock; addl $0,0(%%esp)" : : : "memory");
#endif
else
__asm __volatile("" : : : "memory");
__compiler_membar();
#endif
}