Gitweb:     http://git.kernel.org/git/?p=linux/kernel/git/torvalds/linux-2.6.git;a=commit;h=24f287e412ae90de8d281543c8b1043b6ed6c019
Commit:     24f287e412ae90de8d281543c8b1043b6ed6c019
Parent:     d85714d81cc0408daddb68c10f7fd69eafe7c213
Author:     David S. Miller <[EMAIL PROTECTED]>
AuthorDate: Mon Oct 15 16:41:44 2007 -0700
Committer:  David S. Miller <[EMAIL PROTECTED]>
CommitDate: Wed Oct 17 16:24:55 2007 -0700

    [SPARC64]: Implement atomic backoff.
    
    When the cpu count is high and contention hits an atomic object, the
    processors can synchronize such that some cpus continually get knocked
    out and cannot complete the atomic update.
    
    So implement an exponential backoff when built for SMP.
    
    Signed-off-by: David S. Miller <[EMAIL PROTECTED]>
---
 arch/sparc64/lib/atomic.S     |   38 +++++++++++++++++++++++++++-----------
 arch/sparc64/lib/bitops.S     |   30 +++++++++++++++++++++---------
 include/asm-sparc64/backoff.h |   28 ++++++++++++++++++++++++++++
 3 files changed, 76 insertions(+), 20 deletions(-)

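For context, every routine touched below uses the same compare-and-swap
retry pattern.  Here is a minimal C sketch of the pre-patch loop
(illustrative only, not kernel source; the function name is made up and
GCC's __sync_val_compare_and_swap stands in for the cas instruction):

    /* Sketch of atomic_add's retry loop before this patch: a failed
     * cas retries immediately, so on large SMP systems contending
     * cpus can repeatedly knock each other out of the update.
     */
    static void atomic_add_sketch(int increment, int *atomic_ptr)
    {
            int old, new;

            do {
                    old = *atomic_ptr;          /* 1: lduw [%o1], %g1   */
                    new = old + increment;      /*    add %g1, %o0, %g7 */
                    /* cas [%o1], %g1, %g7 stores new iff *ptr == old  */
            } while (__sync_val_compare_and_swap(atomic_ptr, old, new) != old);
    }

The patch redirects each failure branch from the retry label (1b) to a
new local label (2f) that delays via BACKOFF_SPIN before retrying.
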
diff --git a/arch/sparc64/lib/atomic.S b/arch/sparc64/lib/atomic.S
index 9633750..70ac418 100644
--- a/arch/sparc64/lib/atomic.S
+++ b/arch/sparc64/lib/atomic.S
@@ -1,10 +1,10 @@
-/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
- * atomic.S: These things are too big to do inline.
+/* atomic.S: These things are too big to do inline.
  *
- * Copyright (C) 1999 David S. Miller ([EMAIL PROTECTED])
+ * Copyright (C) 1999, 2007 David S. Miller ([EMAIL PROTECTED])
  */
 
 #include <asm/asi.h>
+#include <asm/backoff.h>
 
        .text
 
@@ -16,27 +16,31 @@
        .globl  atomic_add
        .type   atomic_add,#function
 atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
+       BACKOFF_SETUP(%o2)
 1:     lduw    [%o1], %g1
        add     %g1, %o0, %g7
        cas     [%o1], %g1, %g7
        cmp     %g1, %g7
-       bne,pn  %icc, 1b
+       bne,pn  %icc, 2f
         nop
        retl
         nop
+2:     BACKOFF_SPIN(%o2, %o3, 1b)
        .size   atomic_add, .-atomic_add
 
        .globl  atomic_sub
        .type   atomic_sub,#function
 atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+       BACKOFF_SETUP(%o2)
 1:     lduw    [%o1], %g1
        sub     %g1, %o0, %g7
        cas     [%o1], %g1, %g7
        cmp     %g1, %g7
-       bne,pn  %icc, 1b
+       bne,pn  %icc, 2f
         nop
        retl
         nop
+2:     BACKOFF_SPIN(%o2, %o3, 1b)
        .size   atomic_sub, .-atomic_sub
 
        /* On SMP we need to use memory barriers to ensure
@@ -60,89 +64,101 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
        .globl  atomic_add_ret
        .type   atomic_add_ret,#function
 atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+       BACKOFF_SETUP(%o2)
        ATOMIC_PRE_BARRIER
 1:     lduw    [%o1], %g1
        add     %g1, %o0, %g7
        cas     [%o1], %g1, %g7
        cmp     %g1, %g7
-       bne,pn  %icc, 1b
+       bne,pn  %icc, 2f
         add    %g7, %o0, %g7
        sra     %g7, 0, %o0
        ATOMIC_POST_BARRIER
        retl
         nop
+2:     BACKOFF_SPIN(%o2, %o3, 1b)
        .size   atomic_add_ret, .-atomic_add_ret
 
        .globl  atomic_sub_ret
        .type   atomic_sub_ret,#function
 atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+       BACKOFF_SETUP(%o2)
        ATOMIC_PRE_BARRIER
 1:     lduw    [%o1], %g1
        sub     %g1, %o0, %g7
        cas     [%o1], %g1, %g7
        cmp     %g1, %g7
-       bne,pn  %icc, 1b
+       bne,pn  %icc, 2f
         sub    %g7, %o0, %g7
        sra     %g7, 0, %o0
        ATOMIC_POST_BARRIER
        retl
         nop
+2:     BACKOFF_SPIN(%o2, %o3, 1b)
        .size   atomic_sub_ret, .-atomic_sub_ret
 
        .globl  atomic64_add
        .type   atomic64_add,#function
 atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
+       BACKOFF_SETUP(%o2)
 1:     ldx     [%o1], %g1
        add     %g1, %o0, %g7
        casx    [%o1], %g1, %g7
        cmp     %g1, %g7
-       bne,pn  %xcc, 1b
+       bne,pn  %xcc, 2f
         nop
        retl
         nop
+2:     BACKOFF_SPIN(%o2, %o3, 1b)
        .size   atomic64_add, .-atomic64_add
 
        .globl  atomic64_sub
        .type   atomic64_sub,#function
 atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+       BACKOFF_SETUP(%o2)
 1:     ldx     [%o1], %g1
        sub     %g1, %o0, %g7
        casx    [%o1], %g1, %g7
        cmp     %g1, %g7
-       bne,pn  %xcc, 1b
+       bne,pn  %xcc, 2f
         nop
        retl
         nop
+2:     BACKOFF_SPIN(%o2, %o3, 1b)
        .size   atomic64_sub, .-atomic64_sub
 
        .globl  atomic64_add_ret
        .type   atomic64_add_ret,#function
 atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+       BACKOFF_SETUP(%o2)
        ATOMIC_PRE_BARRIER
 1:     ldx     [%o1], %g1
        add     %g1, %o0, %g7
        casx    [%o1], %g1, %g7
        cmp     %g1, %g7
-       bne,pn  %xcc, 1b
+       bne,pn  %xcc, 2f
         add    %g7, %o0, %g7
        mov     %g7, %o0
        ATOMIC_POST_BARRIER
        retl
         nop
+2:     BACKOFF_SPIN(%o2, %o3, 1b)
        .size   atomic64_add_ret, .-atomic64_add_ret
 
        .globl  atomic64_sub_ret
        .type   atomic64_sub_ret,#function
 atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+       BACKOFF_SETUP(%o2)
        ATOMIC_PRE_BARRIER
 1:     ldx     [%o1], %g1
        sub     %g1, %o0, %g7
        casx    [%o1], %g1, %g7
        cmp     %g1, %g7
-       bne,pn  %xcc, 1b
+       bne,pn  %xcc, 2f
         sub    %g7, %o0, %g7
        mov     %g7, %o0
        ATOMIC_POST_BARRIER
        retl
         nop
+2:     BACKOFF_SPIN(%o2, %o3, 1b)
        .size   atomic64_sub_ret, .-atomic64_sub_ret
diff --git a/arch/sparc64/lib/bitops.S b/arch/sparc64/lib/bitops.S
index 892431a..6b015a6 100644
--- a/arch/sparc64/lib/bitops.S
+++ b/arch/sparc64/lib/bitops.S
@@ -1,10 +1,10 @@
-/* $Id: bitops.S,v 1.3 2001/11/18 00:12:56 davem Exp $
- * bitops.S: Sparc64 atomic bit operations.
+/* bitops.S: Sparc64 atomic bit operations.
  *
- * Copyright (C) 2000 David S. Miller ([EMAIL PROTECTED])
+ * Copyright (C) 2000, 2007 David S. Miller ([EMAIL PROTECTED])
  */
 
 #include <asm/asi.h>
+#include <asm/backoff.h>
 
        .text
 
@@ -29,6 +29,7 @@
        .globl  test_and_set_bit
        .type   test_and_set_bit,#function
 test_and_set_bit:      /* %o0=nr, %o1=addr */
+       BACKOFF_SETUP(%o3)
        BITOP_PRE_BARRIER
        srlx    %o0, 6, %g1
        mov     1, %o2
@@ -40,18 +41,20 @@ test_and_set_bit:   /* %o0=nr, %o1=addr */
        or      %g7, %o2, %g1
        casx    [%o1], %g7, %g1
        cmp     %g7, %g1
-       bne,pn  %xcc, 1b
+       bne,pn  %xcc, 2f
         and    %g7, %o2, %g2
        clr     %o0
        movrne  %g2, 1, %o0
        BITOP_POST_BARRIER
        retl
         nop
+2:     BACKOFF_SPIN(%o3, %o4, 1b)
        .size   test_and_set_bit, .-test_and_set_bit
 
        .globl  test_and_clear_bit
        .type   test_and_clear_bit,#function
 test_and_clear_bit:    /* %o0=nr, %o1=addr */
+       BACKOFF_SETUP(%o3)
        BITOP_PRE_BARRIER
        srlx    %o0, 6, %g1
        mov     1, %o2
@@ -63,18 +66,20 @@ test_and_clear_bit: /* %o0=nr, %o1=addr */
        andn    %g7, %o2, %g1
        casx    [%o1], %g7, %g1
        cmp     %g7, %g1
-       bne,pn  %xcc, 1b
+       bne,pn  %xcc, 2f
         and    %g7, %o2, %g2
        clr     %o0
        movrne  %g2, 1, %o0
        BITOP_POST_BARRIER
        retl
         nop
+2:     BACKOFF_SPIN(%o3, %o4, 1b)
        .size   test_and_clear_bit, .-test_and_clear_bit
 
        .globl  test_and_change_bit
        .type   test_and_change_bit,#function
 test_and_change_bit:   /* %o0=nr, %o1=addr */
+       BACKOFF_SETUP(%o3)
        BITOP_PRE_BARRIER
        srlx    %o0, 6, %g1
        mov     1, %o2
@@ -86,18 +91,20 @@ test_and_change_bit:        /* %o0=nr, %o1=addr */
        xor     %g7, %o2, %g1
        casx    [%o1], %g7, %g1
        cmp     %g7, %g1
-       bne,pn  %xcc, 1b
+       bne,pn  %xcc, 2f
         and    %g7, %o2, %g2
        clr     %o0
        movrne  %g2, 1, %o0
        BITOP_POST_BARRIER
        retl
         nop
+2:     BACKOFF_SPIN(%o3, %o4, 1b)
        .size   test_and_change_bit, .-test_and_change_bit
 
        .globl  set_bit
        .type   set_bit,#function
 set_bit:               /* %o0=nr, %o1=addr */
+       BACKOFF_SETUP(%o3)
        srlx    %o0, 6, %g1
        mov     1, %o2
        sllx    %g1, 3, %g3
@@ -108,15 +115,17 @@ set_bit:          /* %o0=nr, %o1=addr */
        or      %g7, %o2, %g1
        casx    [%o1], %g7, %g1
        cmp     %g7, %g1
-       bne,pn  %xcc, 1b
+       bne,pn  %xcc, 2f
         nop
        retl
         nop
+2:     BACKOFF_SPIN(%o3, %o4, 1b)
        .size   set_bit, .-set_bit
 
        .globl  clear_bit
        .type   clear_bit,#function
 clear_bit:             /* %o0=nr, %o1=addr */
+       BACKOFF_SETUP(%o3)
        srlx    %o0, 6, %g1
        mov     1, %o2
        sllx    %g1, 3, %g3
@@ -127,15 +136,17 @@ clear_bit:                /* %o0=nr, %o1=addr */
        andn    %g7, %o2, %g1
        casx    [%o1], %g7, %g1
        cmp     %g7, %g1
-       bne,pn  %xcc, 1b
+       bne,pn  %xcc, 2f
         nop
        retl
         nop
+2:     BACKOFF_SPIN(%o3, %o4, 1b)
        .size   clear_bit, .-clear_bit
 
        .globl  change_bit
        .type   change_bit,#function
 change_bit:            /* %o0=nr, %o1=addr */
+       BACKOFF_SETUP(%o3)
        srlx    %o0, 6, %g1
        mov     1, %o2
        sllx    %g1, 3, %g3
@@ -146,8 +157,9 @@ change_bit:         /* %o0=nr, %o1=addr */
        xor     %g7, %o2, %g1
        casx    [%o1], %g7, %g1
        cmp     %g7, %g1
-       bne,pn  %xcc, 1b
+       bne,pn  %xcc, 2f
         nop
        retl
         nop
+2:     BACKOFF_SPIN(%o3, %o4, 1b)
        .size   change_bit, .-change_bit
diff --git a/include/asm-sparc64/backoff.h b/include/asm-sparc64/backoff.h
new file mode 100644
index 0000000..0e32f8b
--- /dev/null
+++ b/include/asm-sparc64/backoff.h
@@ -0,0 +1,28 @@
+#ifndef _SPARC64_BACKOFF_H
+#define _SPARC64_BACKOFF_H
+
+#define BACKOFF_LIMIT  (4 * 1024)
+
+#ifdef CONFIG_SMP
+
+#define BACKOFF_SETUP(reg)     \
+       mov     1, reg
+
+#define BACKOFF_SPIN(reg, tmp, label)  \
+       mov     reg, tmp; \
+88:    brnz,pt tmp, 88b; \
+        sub    tmp, 1, tmp; \
+       cmp     reg, BACKOFF_LIMIT; \
+       bg,pn   %xcc, label; \
+        nop; \
+       ba,pt   %xcc, label; \
+        sllx   reg, 1, reg;
+
+#else
+
+#define BACKOFF_SETUP(reg)
+#define BACKOFF_SPIN(reg, tmp, label)
+
+#endif
+
+#endif /* _SPARC64_BACKOFF_H */
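
In C terms, BACKOFF_SPIN on SMP behaves roughly like the sketch below
(illustrative only; backoff_spin_sketch is a hypothetical name, not a
kernel symbol).  It busy-waits for the current backoff count, then
doubles the count until it has grown past BACKOFF_LIMIT, after which
the caller branches back to its retry label:

    static void backoff_spin_sketch(unsigned long *backoff)
    {
            /* 88: brnz,pt tmp, 88b; sub tmp, 1, tmp -- spin down */
            volatile unsigned long spin = *backoff;

            while (spin != 0)
                    spin--;

            /* cmp reg, BACKOFF_LIMIT; bg,pn skips the doubling */
            if (*backoff <= 4 * 1024)           /* BACKOFF_LIMIT    */
                    *backoff <<= 1;             /* sllx reg, 1, reg */
            /* ba,pt %xcc, label: retry the atomic operation */
    }

On UP kernels both macros expand to nothing, as the #else branch above
shows.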