Commit 9d8144f

amd64: provide custom zpcpu set/add/sub routines
Note that the clobbers are highly overzealous; this can be cleaned up later.
mjguzik committed Feb 12, 2020
1 parent 85354a6 commit 9d8144f
Showing 2 changed files with 58 additions and 4 deletions.
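
For context (not part of this commit): on architectures without custom routines, the machine-independent zpcpu helpers in sys/sys/pcpu.h go through zpcpu_get() and an ordinary read-modify-write, roughly of the shape sketched below (the name zpcpu_add_mi and the exact spelling are illustrative, not from the tree). The point of the amd64 versions added here is that a single %gs-relative instruction performs the whole update against the current CPU's slot, so it cannot be torn by an interrupt, preemption, or migration and needs no critical section around it.

/* Illustrative MI-style shape only; the real macros live in sys/sys/pcpu.h. */
#define	zpcpu_add_mi(base, n) do {					\
	__typeof(*base) *__p = zpcpu_get(base);				\
	*__p += (n);							\
} while (0)
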
5 changes: 1 addition & 4 deletions sys/amd64/include/counter.h
@@ -86,10 +86,7 @@ counter_u64_add(counter_u64_t c, int64_t inc)
{

KASSERT(IS_BSP() || c != EARLY_COUNTER, ("EARLY_COUNTER used on AP"));
-	__asm __volatile("addq\t%1,%%gs:(%0)"
-	    :
-	    : "r" (c), "ri" (inc)
-	    : "memory", "cc");
+	zpcpu_add(c, inc);
}

#endif /* ! __MACHINE_COUNTER_H__ */
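
counter(9) is the consumer adjusted above; with this change counter_u64_add() simply becomes zpcpu_add() on amd64. A minimal usage sketch follows — the foo_* names are hypothetical, only the counter(9) calls are real API:

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/malloc.h>
#include <sys/counter.h>

static counter_u64_t foo_events;	/* hypothetical per-CPU counter */

static void
foo_init(void)
{

	foo_events = counter_u64_alloc(M_WAITOK);
}

static void
foo_hot_path(void)
{

	/* After this commit: zpcpu_add(), i.e. one %gs-relative addq. */
	counter_u64_add(foo_events, 1);
}

static uint64_t
foo_events_read(void)
{

	/* Sums every CPU's slot; the result may lag concurrent updates. */
	return (counter_u64_fetch(foo_events));
}
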
57 changes: 57 additions & 0 deletions sys/amd64/include/pcpu.h
@@ -244,6 +244,63 @@ _Static_assert(sizeof(struct monitorbuf) == 128, "2x cache line");
#define zpcpu_base_to_offset(base) (void *)((uintptr_t)(base) - (uintptr_t)&__pcpu[0])
#define zpcpu_offset_to_base(base) (void *)((uintptr_t)(base) + (uintptr_t)&__pcpu[0])

#define zpcpu_sub_protected(base, n) do { \
ZPCPU_ASSERT_PROTECTED(); \
zpcpu_sub(base, n); \
} while (0)

#define zpcpu_set_protected(base, n) do { \
__typeof(*base) __n = (n); \
ZPCPU_ASSERT_PROTECTED(); \
switch (sizeof(*base)) { \
case 4: \
__asm __volatile("movl\t%1,%%gs:(%0)" \
: : "r" (base), "ri" (__n) : "memory", "cc"); \
break; \
case 8: \
__asm __volatile("movq\t%1,%%gs:(%0)" \
: : "r" (base), "ri" (__n) : "memory", "cc"); \
break; \
default: \
*zpcpu_get(base) = __n; \
} \
} while (0);

#define zpcpu_add(base, n) do { \
__typeof(*base) __n = (n); \
CTASSERT(sizeof(*base) == 4 || sizeof(*base) == 8); \
switch (sizeof(*base)) { \
case 4: \
__asm __volatile("addl\t%1,%%gs:(%0)" \
: : "r" (base), "ri" (__n) : "memory", "cc"); \
break; \
case 8: \
__asm __volatile("addq\t%1,%%gs:(%0)" \
: : "r" (base), "ri" (__n) : "memory", "cc"); \
break; \
} \
} while (0)

#define zpcpu_add_protected(base, n) do { \
ZPCPU_ASSERT_PROTECTED(); \
zpcpu_add(base, n); \
} while (0)

#define zpcpu_sub(base, n) do { \
__typeof(*base) __n = (n); \
CTASSERT(sizeof(*base) == 4 || sizeof(*base) == 8); \
switch (sizeof(*base)) { \
case 4: \
__asm __volatile("subl\t%1,%%gs:(%0)" \
: : "r" (base), "ri" (__n) : "memory", "cc"); \
break; \
case 8: \
__asm __volatile("subq\t%1,%%gs:(%0)" \
: : "r" (base), "ri" (__n) : "memory", "cc"); \
break; \
} \
} while (0);

#else /* !__GNUCLIKE_ASM || !__GNUCLIKE___TYPEOF */

#error "this file needs to be ported to your compiler"
