Author: mjg
Date: Wed Feb 12 11:15:33 2020
New Revision: 357807
URL: https://svnweb.freebsd.org/changeset/base/357807

Log:
  amd64: provide custom zpcpu set/add/sub routines
  
  Note that clobbers are highly overzealous, can be cleaned up later.

Modified:
  head/sys/amd64/include/counter.h
  head/sys/amd64/include/pcpu.h

Modified: head/sys/amd64/include/counter.h
==============================================================================
--- head/sys/amd64/include/counter.h    Wed Feb 12 11:14:23 2020        (r357806)
+++ head/sys/amd64/include/counter.h    Wed Feb 12 11:15:33 2020        (r357807)
@@ -86,10 +86,7 @@ counter_u64_add(counter_u64_t c, int64_t inc)
 {
 
        KASSERT(IS_BSP() || c != EARLY_COUNTER, ("EARLY_COUNTER used on AP"));
-       __asm __volatile("addq\t%1,%%gs:(%0)"
-           :
-           : "r" (c), "ri" (inc)
-           : "memory", "cc");
+       /* For the 8-byte counter, zpcpu_add() expands to the same single
+        * addq against %gs-based per-CPU memory as the asm it replaces. */
+       zpcpu_add(c, inc);
 }
 
 #endif /* ! __MACHINE_COUNTER_H__ */

Modified: head/sys/amd64/include/pcpu.h
==============================================================================
--- head/sys/amd64/include/pcpu.h       Wed Feb 12 11:14:23 2020        (r357806)
+++ head/sys/amd64/include/pcpu.h       Wed Feb 12 11:15:33 2020        (r357807)
@@ -244,6 +244,63 @@ _Static_assert(sizeof(struct monitorbuf) == 128, "2x c
 #define zpcpu_base_to_offset(base) (void *)((uintptr_t)(base) - (uintptr_t)&__pcpu[0])
 #define zpcpu_offset_to_base(base) (void *)((uintptr_t)(base) + (uintptr_t)&__pcpu[0])
 
+/*
+ * Asserting variant of zpcpu_sub().
+ * NOTE(review): presumably ZPCPU_ASSERT_PROTECTED() checks the caller
+ * cannot migrate CPUs (pinned/critical section) -- confirm against its
+ * definition elsewhere in this header.
+ */
+#define zpcpu_sub_protected(base, n) do {                              \
+       ZPCPU_ASSERT_PROTECTED();                                       \
+       zpcpu_sub(base, n);                                             \
+} while (0)
+
+/*
+ * Store n into the current CPU's instance of *base.
+ * 4- and 8-byte objects use a single mov against %gs-based per-CPU
+ * memory; other sizes fall back to a plain store through zpcpu_get().
+ * NOTE(review): per the commit log the clobbers are overzealous; mov
+ * does not write flags, so the "cc" clobber could be dropped later.
+ */
+#define zpcpu_set_protected(base, n) do {                              \
+       __typeof(*base) __n = (n);                                      \
+       ZPCPU_ASSERT_PROTECTED();                                       \
+       switch (sizeof(*base)) {                                        \
+       case 4:                                                         \
+               __asm __volatile("movl\t%1,%%gs:(%0)"                   \
+                   : : "r" (base), "ri" (__n) : "memory", "cc");       \
+               break;                                                  \
+       case 8:                                                         \
+               __asm __volatile("movq\t%1,%%gs:(%0)"                   \
+                   : : "r" (base), "ri" (__n) : "memory", "cc");       \
+               break;                                                  \
+       default:                                                        \
+               *zpcpu_get(base) = __n;                                 \
+       }                                                               \
+} while (0)
+
+/*
+ * Add n to the current CPU's instance of *base.  Only 4- and 8-byte
+ * objects are supported (enforced by the CTASSERT); each case emits a
+ * single addl/addq against %gs-based per-CPU memory.
+ */
+#define zpcpu_add(base, n) do {                                        \
+       __typeof(*base) __n = (n);                                      \
+       CTASSERT(sizeof(*base) == 4 || sizeof(*base) == 8);             \
+       switch (sizeof(*base)) {                                        \
+       case 4:                                                         \
+               __asm __volatile("addl\t%1,%%gs:(%0)"                   \
+                   : : "r" (base), "ri" (__n) : "memory", "cc");       \
+               break;                                                  \
+       case 8:                                                         \
+               __asm __volatile("addq\t%1,%%gs:(%0)"                   \
+                   : : "r" (base), "ri" (__n) : "memory", "cc");       \
+               break;                                                  \
+       }                                                               \
+} while (0)
+
+/*
+ * Asserting variant of zpcpu_add().
+ * NOTE(review): presumably ZPCPU_ASSERT_PROTECTED() checks the caller
+ * cannot migrate CPUs (pinned/critical section) -- confirm against its
+ * definition elsewhere in this header.
+ */
+#define zpcpu_add_protected(base, n) do {                              \
+       ZPCPU_ASSERT_PROTECTED();                                       \
+       zpcpu_add(base, n);                                             \
+} while (0)
+
+/*
+ * Subtract n from the current CPU's instance of *base.  Only 4- and
+ * 8-byte objects are supported (enforced by the CTASSERT); each case
+ * emits a single subl/subq against %gs-based per-CPU memory.
+ */
+#define zpcpu_sub(base, n) do {                                        \
+       __typeof(*base) __n = (n);                                      \
+       CTASSERT(sizeof(*base) == 4 || sizeof(*base) == 8);             \
+       switch (sizeof(*base)) {                                        \
+       case 4:                                                         \
+               __asm __volatile("subl\t%1,%%gs:(%0)"                   \
+                   : : "r" (base), "ri" (__n) : "memory", "cc");       \
+               break;                                                  \
+       case 8:                                                         \
+               __asm __volatile("subq\t%1,%%gs:(%0)"                   \
+                   : : "r" (base), "ri" (__n) : "memory", "cc");       \
+               break;                                                  \
+       }                                                               \
+} while (0)
+
 #else /* !__GNUCLIKE_ASM || !__GNUCLIKE___TYPEOF */
 
 #error "this file needs to be ported to your compiler"
_______________________________________________
svn-src-all@freebsd.org mailing list
https://lists.freebsd.org/mailman/listinfo/svn-src-all
To unsubscribe, send any mail to "svn-src-all-unsubscribe@freebsd.org"

Reply via email to