The patch titled
     Fix FRV cmpxchg_local
has been added to the -mm tree.  Its filename is
     fix-frv-cmpxchg_local.patch

Before you just go and hit "reply", please:
   a) Consider who else should be cc'ed
   b) Prefer to cc a suitable mailing list as well
   c) Ideally: find the original patch on the mailing list and do a
      reply-to-all to that, adding suitable additional cc's

*** Remember to use Documentation/SubmitChecklist when testing your code ***

See http://www.zip.com.au/~akpm/linux/patches/stuff/added-to-mm.txt to find
out what to do about this

The current -mm tree may be found at http://userweb.kernel.org/~akpm/mmotm/

------------------------------------------------------
Subject: Fix FRV cmpxchg_local
From: Mathieu Desnoyers <[EMAIL PROTECTED]>

Fix the FRV cmpxchg_local by breaking the following header dependency loops:

linux/kernel.h -> linux/bitops.h -> asm-frv/bitops.h -> asm-frv/atomic.h
  -> asm-frv/system.h -> asm-generic/cmpxchg_local.h
  -> typecheck() defined in linux/kernel.h

and

linux/kernel.h -> linux/bitops.h -> asm-frv/bitops.h -> asm-frv/atomic.h
  -> asm-generic/cmpxchg_local.h -> typecheck() defined in linux/kernel.h
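
For reference, the typecheck() macro that the generic cmpxchg_local code relies
on lives in linux/kernel.h; roughly (a sketch, the exact definition may differ):

	/* sketch of linux/kernel.h's typecheck(); exact definition may vary */
	#define typecheck(type, x) \
	({	type __dummy; \
		typeof(x) __dummy2; \
		(void)(&__dummy == &__dummy2); \
		1; \
	})

Because asm-generic/cmpxchg_local.h uses this for its type checks, every header
that pulls it in ends up needing linux/kernel.h, which in turn includes
linux/bitops.h, closing the loop.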

In order to fix this:
- Move the atomic_test_and_*_mask inlines from asm-frv/atomic.h (why are they
  there at all anyway?  They do not touch atomic_t variables!) to
  asm-frv/bitops.h.  A usage sketch of these helpers follows below.
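
These helpers back the atomic_clear_mask()/atomic_set_mask() macros; a
hypothetical caller (clear_busy_flag() is illustrative only, not in the tree)
would look like:

	/* hypothetical example: atomically clear bit 0 in a plain flags word */
	static void clear_busy_flag(volatile unsigned long *flags)
	{
		/* expands to atomic_test_and_ANDNOT_mask(1UL, flags) */
		atomic_clear_mask(1UL, flags);
	}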

Also fix a build issue with cmpxchg: unlike other architectures, FRV's cmpxchg
does not cast its pointer argument to (unsigned long *), so add the cast in the
cmpxchg_local code instead.
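
As a reminder of the intended semantics (not part of the patch): cmpxchg_local()
behaves like cmpxchg() but is only guaranteed to be atomic with respect to the
local CPU.  A hypothetical caller (try_claim_slot() is illustrative only):

	/* hypothetical example: claim a slot if it still holds 0 */
	static int try_claim_slot(unsigned long *slot)
	{
		/* cmpxchg_local() returns the previous value; success iff it was 0 */
		return cmpxchg_local(slot, 0UL, 1UL) == 0UL;
	}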

FRV builds fine with this patch.

Thanks to Adrian Bunk <[EMAIL PROTECTED]> for spotting this bug.

Signed-off-by: Mathieu Desnoyers <[EMAIL PROTECTED]>
Cc: Adrian Bunk <[EMAIL PROTECTED]>
Cc: David Howells <[EMAIL PROTECTED]>
Signed-off-by: Andrew Morton <[EMAIL PROTECTED]>
---

 include/asm-frv/atomic.h |   81 ------------------------------------
 include/asm-frv/bitops.h |   83 ++++++++++++++++++++++++++++++++++++-
 include/asm-frv/system.h |    3 -
 3 files changed, 83 insertions(+), 84 deletions(-)

diff -puN include/asm-frv/atomic.h~fix-frv-cmpxchg_local include/asm-frv/atomic.h
--- a/include/asm-frv/atomic.h~fix-frv-cmpxchg_local
+++ a/include/asm-frv/atomic.h
@@ -125,87 +125,6 @@ static inline void atomic_dec(atomic_t *
 #define atomic_dec_and_test(v)         (atomic_sub_return(1, (v)) == 0)
 #define atomic_inc_and_test(v)         (atomic_add_return(1, (v)) == 0)
 
-#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
-static inline
-unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v)
-{
-       unsigned long old, tmp;
-
-       asm volatile(
-               "0:                                             \n"
-               "       orcc            gr0,gr0,gr0,icc3        \n"     /* set ICC3.Z */
-               "       ckeq            icc3,cc7                \n"
-               "       ld.p            %M0,%1                  \n"     /* LD.P/ORCR are atomic */
-               "       orcr            cc7,cc7,cc3             \n"     /* set CC3 to true */
-               "       and%I3          %1,%3,%2                \n"
-               "       cst.p           %2,%M0          ,cc3,#1 \n"     /* if store happens... */
-               "       corcc           gr29,gr29,gr0   ,cc3,#1 \n"     /* ... clear ICC3.Z */
-               "       beq             icc3,#0,0b              \n"
-               : "+U"(*v), "=&r"(old), "=r"(tmp)
-               : "NPr"(~mask)
-               : "memory", "cc7", "cc3", "icc3"
-               );
-
-       return old;
-}
-
-static inline
-unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v)
-{
-       unsigned long old, tmp;
-
-       asm volatile(
-               "0:                                             \n"
-               "       orcc            gr0,gr0,gr0,icc3        \n"     /* set ICC3.Z */
-               "       ckeq            icc3,cc7                \n"
-               "       ld.p            %M0,%1                  \n"     /* LD.P/ORCR are atomic */
-               "       orcr            cc7,cc7,cc3             \n"     /* set CC3 to true */
-               "       or%I3           %1,%3,%2                \n"
-               "       cst.p           %2,%M0          ,cc3,#1 \n"     /* if store happens... */
-               "       corcc           gr29,gr29,gr0   ,cc3,#1 \n"     /* ... clear ICC3.Z */
-               "       beq             icc3,#0,0b              \n"
-               : "+U"(*v), "=&r"(old), "=r"(tmp)
-               : "NPr"(mask)
-               : "memory", "cc7", "cc3", "icc3"
-               );
-
-       return old;
-}
-
-static inline
-unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v)
-{
-       unsigned long old, tmp;
-
-       asm volatile(
-               "0:                                             \n"
-               "       orcc            gr0,gr0,gr0,icc3        \n"     /* set ICC3.Z */
-               "       ckeq            icc3,cc7                \n"
-               "       ld.p            %M0,%1                  \n"     /* LD.P/ORCR are atomic */
-               "       orcr            cc7,cc7,cc3             \n"     /* set CC3 to true */
-               "       xor%I3          %1,%3,%2                \n"
-               "       cst.p           %2,%M0          ,cc3,#1 \n"     /* if store happens... */
-               "       corcc           gr29,gr29,gr0   ,cc3,#1 \n"     /* ... clear ICC3.Z */
-               "       beq             icc3,#0,0b              \n"
-               : "+U"(*v), "=&r"(old), "=r"(tmp)
-               : "NPr"(mask)
-               : "memory", "cc7", "cc3", "icc3"
-               );
-
-       return old;
-}
-
-#else
-
-extern unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v);
-extern unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v);
-extern unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v);
-
-#endif
-
-#define atomic_clear_mask(mask, v)     atomic_test_and_ANDNOT_mask((mask), (v))
-#define atomic_set_mask(mask, v)       atomic_test_and_OR_mask((mask), (v))
-
 /*****************************************************************************/
 /*
  * exchange value with memory
diff -puN include/asm-frv/bitops.h~fix-frv-cmpxchg_local include/asm-frv/bitops.h
--- a/include/asm-frv/bitops.h~fix-frv-cmpxchg_local
+++ a/include/asm-frv/bitops.h
@@ -16,8 +16,6 @@
 
 #include <linux/compiler.h>
 #include <asm/byteorder.h>
-#include <asm/system.h>
-#include <asm/atomic.h>
 
 #ifdef __KERNEL__
 
@@ -33,6 +31,87 @@
 #define smp_mb__before_clear_bit()     barrier()
 #define smp_mb__after_clear_bit()      barrier()
 
+#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
+static inline
+unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v)
+{
+       unsigned long old, tmp;
+
+       asm volatile(
+               "0:                                             \n"
+               "       orcc            gr0,gr0,gr0,icc3        \n"     /* set ICC3.Z */
+               "       ckeq            icc3,cc7                \n"
+               "       ld.p            %M0,%1                  \n"     /* LD.P/ORCR are atomic */
+               "       orcr            cc7,cc7,cc3             \n"     /* set CC3 to true */
+               "       and%I3          %1,%3,%2                \n"
+               "       cst.p           %2,%M0          ,cc3,#1 \n"     /* if store happens... */
+               "       corcc           gr29,gr29,gr0   ,cc3,#1 \n"     /* ... clear ICC3.Z */
+               "       beq             icc3,#0,0b              \n"
+               : "+U"(*v), "=&r"(old), "=r"(tmp)
+               : "NPr"(~mask)
+               : "memory", "cc7", "cc3", "icc3"
+               );
+
+       return old;
+}
+
+static inline
+unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v)
+{
+       unsigned long old, tmp;
+
+       asm volatile(
+               "0:                                             \n"
+               "       orcc            gr0,gr0,gr0,icc3        \n"     /* set ICC3.Z */
+               "       ckeq            icc3,cc7                \n"
+               "       ld.p            %M0,%1                  \n"     /* LD.P/ORCR are atomic */
+               "       orcr            cc7,cc7,cc3             \n"     /* set CC3 to true */
+               "       or%I3           %1,%3,%2                \n"
+               "       cst.p           %2,%M0          ,cc3,#1 \n"     /* if store happens... */
+               "       corcc           gr29,gr29,gr0   ,cc3,#1 \n"     /* ... clear ICC3.Z */
+               "       beq             icc3,#0,0b              \n"
+               : "+U"(*v), "=&r"(old), "=r"(tmp)
+               : "NPr"(mask)
+               : "memory", "cc7", "cc3", "icc3"
+               );
+
+       return old;
+}
+
+static inline
+unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v)
+{
+       unsigned long old, tmp;
+
+       asm volatile(
+               "0:                                             \n"
+               "       orcc            gr0,gr0,gr0,icc3        \n"     /* set ICC3.Z */
+               "       ckeq            icc3,cc7                \n"
+               "       ld.p            %M0,%1                  \n"     /* LD.P/ORCR are atomic */
+               "       orcr            cc7,cc7,cc3             \n"     /* set CC3 to true */
+               "       xor%I3          %1,%3,%2                \n"
+               "       cst.p           %2,%M0          ,cc3,#1 \n"     /* if store happens... */
+               "       corcc           gr29,gr29,gr0   ,cc3,#1 \n"     /* ... clear ICC3.Z */
+               "       beq             icc3,#0,0b              \n"
+               : "+U"(*v), "=&r"(old), "=r"(tmp)
+               : "NPr"(mask)
+               : "memory", "cc7", "cc3", "icc3"
+               );
+
+       return old;
+}
+
+#else
+
+extern unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v);
+extern unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v);
+extern unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v);
+
+#endif
+
+#define atomic_clear_mask(mask, v)     atomic_test_and_ANDNOT_mask((mask), (v))
+#define atomic_set_mask(mask, v)       atomic_test_and_OR_mask((mask), (v))
+
 static inline int test_and_clear_bit(int nr, volatile void *addr)
 {
        volatile unsigned long *ptr = addr;
diff -puN include/asm-frv/system.h~fix-frv-cmpxchg_local include/asm-frv/system.h
--- a/include/asm-frv/system.h~fix-frv-cmpxchg_local
+++ a/include/asm-frv/system.h
@@ -14,6 +14,7 @@
 
 #include <linux/types.h>
 #include <linux/linkage.h>
+#include <linux/kernel.h>
 
 struct thread_struct;
 
@@ -276,7 +277,7 @@ static inline unsigned long __cmpxchg_lo
 {
        switch (size) {
        case 4:
-               return cmpxchg(ptr, old, new);
+               return cmpxchg((unsigned long *)ptr, old, new);
        default:
                return __cmpxchg_local_generic(ptr, old, new, size);
        }
_

Patches currently in -mm which might be from [EMAIL PROTECTED] are

origin.patch
fix-frv-cmpxchg_local.patch

