amd64: provide custom zpcpu set/add/sub routines
Note that the clobbers are highly overzealous; they can be cleaned up later.
commit 2318ed2508
parent bee115bc59
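For context: zpcpu pointers come from per-CPU (pcpu) UMA zones and are kept as offsets into the per-CPU area (see zpcpu_base_to_offset below), so a single %gs-relative instruction reaches the current CPU's copy. A minimal user-space sketch of that offset scheme, with a plain array standing in for the kernel's __pcpu regions; all names here are hypothetical, not the kernel's:

#include <stdint.h>
#include <stdio.h>

#define MAXCPU	4

/* Array stand-in for the kernel's per-CPU regions (__pcpu). */
static _Alignas(64) char pcpu_region[MAXCPU][4096];

/*
 * Mirrors of zpcpu_base_to_offset()/zpcpu_get(): a zpcpu "pointer"
 * is really an offset from the start of CPU 0's region.
 */
#define zpcpu_base_to_offset(base)					\
	((void *)((uintptr_t)(base) - (uintptr_t)&pcpu_region[0]))
#define zpcpu_get_cpu(off, cpu)						\
	((void *)((uintptr_t)(off) + (uintptr_t)&pcpu_region[cpu]))

int
main(void)
{
	/* A counter living 128 bytes into each CPU's region. */
	uint64_t *c = zpcpu_base_to_offset(&pcpu_region[0][128]);
	int cpu;

	/*
	 * Each CPU bumps only its own copy; the kernel resolves the
	 * offset through the %gs segment base instead of a cpu index.
	 */
	for (cpu = 0; cpu < MAXCPU; cpu++)
		++*(uint64_t *)zpcpu_get_cpu(c, cpu);

	for (cpu = 0; cpu < MAXCPU; cpu++)
		printf("cpu%d: %llu\n", cpu,
		    (unsigned long long)*(uint64_t *)zpcpu_get_cpu(c, cpu));
	return (0);
}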
diff --git a/sys/amd64/include/counter.h b/sys/amd64/include/counter.h
@@ -86,10 +86,7 @@ counter_u64_add(counter_u64_t c, int64_t inc)
 {
 
 	KASSERT(IS_BSP() || c != EARLY_COUNTER, ("EARLY_COUNTER used on AP"));
-	__asm __volatile("addq\t%1,%%gs:(%0)"
-	    :
-	    : "r" (c), "ri" (inc)
-	    : "memory", "cc");
+	zpcpu_add(c, inc);
 }
 
 #endif /* ! __MACHINE_COUNTER_H__ */
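With this, counter_u64_add() stops hand-rolling addq and delegates to the generic zpcpu_add() added below, which selects addl or addq by operand size at compile time. The same switch-on-sizeof idiom, stripped of the %gs addressing so it runs stand-alone (GNU C; sized_add is a hypothetical name, not a kernel macro):

#include <stdint.h>
#include <stdio.h>

/*
 * Size-dispatched add in the style of zpcpu_add(): the switch on
 * sizeof(*base) is resolved at compile time, so only one branch
 * survives in the generated code.
 */
#define sized_add(base, n) do {						\
	__typeof(*(base)) __n = (n);					\
	switch (sizeof(*(base))) {					\
	case 4:								\
		*(uint32_t *)(base) += (uint32_t)__n;			\
		break;							\
	case 8:								\
		*(uint64_t *)(base) += (uint64_t)__n;			\
		break;							\
	}								\
} while (0)

int
main(void)
{
	uint32_t a = 1;
	uint64_t b = 1;

	sized_add(&a, 41);	/* takes the 4-byte branch */
	sized_add(&b, 41);	/* takes the 8-byte branch */
	printf("%u %llu\n", a, (unsigned long long)b);
	return (0);
}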
diff --git a/sys/amd64/include/pcpu.h b/sys/amd64/include/pcpu.h
@@ -244,6 +244,63 @@ _Static_assert(sizeof(struct monitorbuf) == 128, "2x cache line");
 #define zpcpu_base_to_offset(base) (void *)((uintptr_t)(base) - (uintptr_t)&__pcpu[0])
 #define zpcpu_offset_to_base(base) (void *)((uintptr_t)(base) + (uintptr_t)&__pcpu[0])
 
+#define zpcpu_sub_protected(base, n) do {				\
+	ZPCPU_ASSERT_PROTECTED();					\
+	zpcpu_sub(base, n);						\
+} while (0)
+
+#define zpcpu_set_protected(base, n) do {				\
+	__typeof(*base) __n = (n);					\
+	ZPCPU_ASSERT_PROTECTED();					\
+	switch (sizeof(*base)) {					\
+	case 4:								\
+		__asm __volatile("movl\t%1,%%gs:(%0)"			\
+		    : : "r" (base), "ri" (__n) : "memory", "cc");	\
+		break;							\
+	case 8:								\
+		__asm __volatile("movq\t%1,%%gs:(%0)"			\
+		    : : "r" (base), "ri" (__n) : "memory", "cc");	\
+		break;							\
+	default:							\
+		*zpcpu_get(base) = __n;					\
+	}								\
+} while (0)
+
+#define zpcpu_add(base, n) do {						\
+	__typeof(*base) __n = (n);					\
+	CTASSERT(sizeof(*base) == 4 || sizeof(*base) == 8);		\
+	switch (sizeof(*base)) {					\
+	case 4:								\
+		__asm __volatile("addl\t%1,%%gs:(%0)"			\
+		    : : "r" (base), "ri" (__n) : "memory", "cc");	\
+		break;							\
+	case 8:								\
+		__asm __volatile("addq\t%1,%%gs:(%0)"			\
+		    : : "r" (base), "ri" (__n) : "memory", "cc");	\
+		break;							\
+	}								\
+} while (0)
+
+#define zpcpu_add_protected(base, n) do {				\
+	ZPCPU_ASSERT_PROTECTED();					\
+	zpcpu_add(base, n);						\
+} while (0)
+
+#define zpcpu_sub(base, n) do {						\
+	__typeof(*base) __n = (n);					\
+	CTASSERT(sizeof(*base) == 4 || sizeof(*base) == 8);		\
+	switch (sizeof(*base)) {					\
+	case 4:								\
+		__asm __volatile("subl\t%1,%%gs:(%0)"			\
+		    : : "r" (base), "ri" (__n) : "memory", "cc");	\
+		break;							\
+	case 8:								\
+		__asm __volatile("subq\t%1,%%gs:(%0)"			\
+		    : : "r" (base), "ri" (__n) : "memory", "cc");	\
+		break;							\
+	}								\
+} while (0)
+
 #else /* !__GNUCLIKE_ASM || !__GNUCLIKE___TYPEOF */
 
 #error "this file needs to be ported to your compiler"
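Expected in-kernel usage, sketched under the assumption that the pointer was allocated from a pcpu zone (e.g. via uma_zalloc_pcpu()) and that the _protected variants run with migration prevented, such as inside a critical section; the function and its caller are illustrative, not part of this commit:

/*
 * Hedged kernel-style sketch (not buildable stand-alone).
 */
static void
stats_update(uint64_t *pcpu_stat)	/* from a pcpu UMA zone */
{

	/* Plain variant: a single %gs-relative add, no lock needed. */
	zpcpu_add(pcpu_stat, 1);

	/*
	 * The _protected variants additionally assert that the thread
	 * cannot migrate to another CPU mid-sequence.
	 */
	critical_enter();
	zpcpu_set_protected(pcpu_stat, 1);	/* overwrite this CPU's slot */
	zpcpu_add_protected(pcpu_stat, 2);
	zpcpu_sub_protected(pcpu_stat, 1);	/* slot is now 2 */
	critical_exit();
}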