[tip:locking/core] locking/atomic/x86: Switch atomic.h to use atomic-instrumented.h

2018-03-12 Thread tip-bot for Dmitry Vyukov
Commit-ID:  8bf705d130396e69c04cd8e6e010244ad2ce71f4
Gitweb: https://git.kernel.org/tip/8bf705d130396e69c04cd8e6e010244ad2ce71f4
Author: Dmitry Vyukov 
AuthorDate: Mon, 29 Jan 2018 18:26:05 +0100
Committer:  Ingo Molnar 
CommitDate: Mon, 12 Mar 2018 12:15:35 +0100

locking/atomic/x86: Switch atomic.h to use atomic-instrumented.h

Add arch_ prefix to all atomic operations and include
<asm-generic/atomic-instrumented.h>. This will allow adding
KASAN instrumentation to all atomic ops.
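
With the arch_ prefix in place, the generic atomic-instrumented.h header can
wrap each operation with a memory-access check before handing off to the
architecture implementation. A simplified sketch of two such wrappers (the
real header is introduced by a companion patch, not shown here;
kasan_check_read()/kasan_check_write() come from <linux/kasan-checks.h>):

/*
 * Simplified sketch of the generic instrumented wrappers. Each wrapper
 * reports the access to KASAN and then defers to the arch_ primitive
 * renamed in this patch.
 */
#include <linux/kasan-checks.h>

static __always_inline int atomic_read(const atomic_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic_read(v);
}

static __always_inline void atomic_set(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}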

Signed-off-by: Dmitry Vyukov 
Cc: Andrew Morton 
Cc: Andrey Ryabinin 
Cc: Linus Torvalds 
Cc: Mark Rutland 
Cc: Peter Zijlstra 
Cc: Thomas Gleixner 
Cc: Will Deacon 
Cc: kasan-...@googlegroups.com
Cc: linux...@kvack.org
Link: http://lkml.kernel.org/r/54f0eb64260b84199e538652e079a89b5423ad41.1517246437.git.dvyu...@google.com
Signed-off-by: Ingo Molnar 
---
 arch/x86/include/asm/atomic.h  | 102 ++-
 arch/x86/include/asm/atomic64_32.h | 106 ++--
 arch/x86/include/asm/atomic64_64.h | 108 ++---
 arch/x86/include/asm/cmpxchg.h |  12 ++---
 arch/x86/include/asm/cmpxchg_32.h  |   8 +--
 arch/x86/include/asm/cmpxchg_64.h  |   4 +-
 6 files changed, 172 insertions(+), 168 deletions(-)

diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 72759f131cc5..33afc966d6a9 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -17,36 +17,36 @@
 #define ATOMIC_INIT(i) { (i) }
 
 /**
- * atomic_read - read atomic variable
+ * arch_atomic_read - read atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically reads the value of @v.
  */
-static __always_inline int atomic_read(const atomic_t *v)
+static __always_inline int arch_atomic_read(const atomic_t *v)
 {
return READ_ONCE((v)->counter);
 }
 
 /**
- * atomic_set - set atomic variable
+ * arch_atomic_set - set atomic variable
  * @v: pointer of type atomic_t
  * @i: required value
  *
  * Atomically sets the value of @v to @i.
  */
-static __always_inline void atomic_set(atomic_t *v, int i)
+static __always_inline void arch_atomic_set(atomic_t *v, int i)
 {
WRITE_ONCE(v->counter, i);
 }
 
 /**
- * atomic_add - add integer to atomic variable
+ * arch_atomic_add - add integer to atomic variable
  * @i: integer value to add
  * @v: pointer of type atomic_t
  *
  * Atomically adds @i to @v.
  */
-static __always_inline void atomic_add(int i, atomic_t *v)
+static __always_inline void arch_atomic_add(int i, atomic_t *v)
 {
asm volatile(LOCK_PREFIX "addl %1,%0"
 : "+m" (v->counter)
@@ -54,13 +54,13 @@ static __always_inline void atomic_add(int i, atomic_t *v)
 }
 
 /**
- * atomic_sub - subtract integer from atomic variable
+ * arch_atomic_sub - subtract integer from atomic variable
  * @i: integer value to subtract
  * @v: pointer of type atomic_t
  *
  * Atomically subtracts @i from @v.
  */
-static __always_inline void atomic_sub(int i, atomic_t *v)
+static __always_inline void arch_atomic_sub(int i, atomic_t *v)
 {
asm volatile(LOCK_PREFIX "subl %1,%0"
 : "+m" (v->counter)
@@ -68,7 +68,7 @@ static __always_inline void atomic_sub(int i, atomic_t *v)
 }
 
 /**
- * atomic_sub_and_test - subtract value from variable and test result
+ * arch_atomic_sub_and_test - subtract value from variable and test result
  * @i: integer value to subtract
  * @v: pointer of type atomic_t
  *
@@ -76,63 +76,63 @@ static __always_inline void atomic_sub(int i, atomic_t *v)
  * true if the result is zero, or false for all
  * other cases.
  */
-static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
+static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
 {
GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
 }
 
 /**
- * atomic_inc - increment atomic variable
+ * arch_atomic_inc - increment atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically increments @v by 1.
  */
-static __always_inline void atomic_inc(atomic_t *v)
+static __always_inline void arch_atomic_inc(atomic_t *v)
 {
asm volatile(LOCK_PREFIX "incl %0"
 : "+m" (v->counter));
 }
 
 /**
- * atomic_dec - decrement atomic variable
+ * arch_atomic_dec - decrement atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically decrements @v by 1.
  */
-static __always_inline void atomic_dec(atomic_t *v)
+static __always_inline void arch_atomic_dec(atomic_t *v)
 {
asm volatile(LOCK_PREFIX "decl %0"
 : "+m" (v->counter));
 }
 
 /**
- * atomic_dec_and_test - decrement and test
+ * arch_atomic_dec_and_test - decrement and test
  * @v: pointer of type atomic_t
  *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
-static __always_inline bool atomic_dec_and_test(atomic_t *v)
+static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
 {

[tip:locking/core] locking/atomic/x86: Switch atomic.h to use atomic-instrumented.h

2017-06-23 Thread tip-bot for Dmitry Vyukov
Commit-ID:  1a870f0aa88faf2e5c72303c89a58ebb8638270a
Gitweb: http://git.kernel.org/tip/1a870f0aa88faf2e5c72303c89a58ebb8638270a
Author: Dmitry Vyukov 
AuthorDate: Thu, 22 Jun 2017 16:14:16 +0200
Committer:  Ingo Molnar 
CommitDate: Fri, 23 Jun 2017 10:50:19 +0200

locking/atomic/x86: Switch atomic.h to use atomic-instrumented.h

Add arch_ prefix to all atomic operations and include
<asm-generic/atomic-instrumented.h>. This will allow adding
KASAN instrumentation to all atomic ops.
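
For illustration, the kind of bug this instrumentation makes visible: an
atomic access through a stale pointer previously bypassed KASAN because it
was open-coded inline assembly; routed through the instrumented wrapper it
is checked like any other load or store. The struct, field name and kfree()
timing below are hypothetical, not taken from this patch:

#include <linux/atomic.h>
#include <linux/slab.h>

struct foo {
	atomic_t refcnt;
};

static void buggy_put(struct foo *f)
{
	kfree(f);
	/*
	 * Use-after-free: with instrumented atomics this atomic_inc()
	 * first passes through kasan_check_write(&f->refcnt, sizeof(atomic_t))
	 * and is reported by KASAN; with raw inline-asm atomics it was silent.
	 */
	atomic_inc(&f->refcnt);
}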

Signed-off-by: Dmitry Vyukov 
Reviewed-by: Andrey Ryabinin 
Acked-by: Mark Rutland 
Cc: Andrew Morton 
Cc: Linus Torvalds 
Cc: Peter Zijlstra 
Cc: Thomas Gleixner 
Cc: Will Deacon 
Cc: kasan-...@googlegroups.com
Cc: linux...@kvack.org
Link: http://lkml.kernel.org/r/ff85407a7476ac41bfbdd46a35a93b8f57fa4b1e.1498140838.git.dvyu...@google.com
Signed-off-by: Ingo Molnar 
---
 arch/x86/include/asm/atomic.h  | 102 ++-
 arch/x86/include/asm/atomic64_32.h | 106 ++--
 arch/x86/include/asm/atomic64_64.h | 108 ++---
 arch/x86/include/asm/cmpxchg.h |  12 ++---
 arch/x86/include/asm/cmpxchg_32.h  |   8 +--
 arch/x86/include/asm/cmpxchg_64.h  |   4 +-
 6 files changed, 172 insertions(+), 168 deletions(-)

diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 0874ebd..03dd2a6 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -16,36 +16,36 @@
 #define ATOMIC_INIT(i) { (i) }
 
 /**
- * atomic_read - read atomic variable
+ * arch_atomic_read - read atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically reads the value of @v.
  */
-static __always_inline int atomic_read(const atomic_t *v)
+static __always_inline int arch_atomic_read(const atomic_t *v)
 {
return READ_ONCE((v)->counter);
 }
 
 /**
- * atomic_set - set atomic variable
+ * arch_atomic_set - set atomic variable
  * @v: pointer of type atomic_t
  * @i: required value
  *
  * Atomically sets the value of @v to @i.
  */
-static __always_inline void atomic_set(atomic_t *v, int i)
+static __always_inline void arch_atomic_set(atomic_t *v, int i)
 {
WRITE_ONCE(v->counter, i);
 }
 
 /**
- * atomic_add - add integer to atomic variable
+ * arch_atomic_add - add integer to atomic variable
  * @i: integer value to add
  * @v: pointer of type atomic_t
  *
  * Atomically adds @i to @v.
  */
-static __always_inline void atomic_add(int i, atomic_t *v)
+static __always_inline void arch_atomic_add(int i, atomic_t *v)
 {
asm volatile(LOCK_PREFIX "addl %1,%0"
 : "+m" (v->counter)
@@ -53,13 +53,13 @@ static __always_inline void atomic_add(int i, atomic_t *v)
 }
 
 /**
- * atomic_sub - subtract integer from atomic variable
+ * arch_atomic_sub - subtract integer from atomic variable
  * @i: integer value to subtract
  * @v: pointer of type atomic_t
  *
  * Atomically subtracts @i from @v.
  */
-static __always_inline void atomic_sub(int i, atomic_t *v)
+static __always_inline void arch_atomic_sub(int i, atomic_t *v)
 {
asm volatile(LOCK_PREFIX "subl %1,%0"
 : "+m" (v->counter)
@@ -67,7 +67,7 @@ static __always_inline void atomic_sub(int i, atomic_t *v)
 }
 
 /**
- * atomic_sub_and_test - subtract value from variable and test result
+ * arch_atomic_sub_and_test - subtract value from variable and test result
  * @i: integer value to subtract
  * @v: pointer of type atomic_t
  *
@@ -75,63 +75,63 @@ static __always_inline void atomic_sub(int i, atomic_t *v)
  * true if the result is zero, or false for all
  * other cases.
  */
-static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
+static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
 {
GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
 }
 
 /**
- * atomic_inc - increment atomic variable
+ * arch_atomic_inc - increment atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically increments @v by 1.
  */
-static __always_inline void atomic_inc(atomic_t *v)
+static __always_inline void arch_atomic_inc(atomic_t *v)
 {
asm volatile(LOCK_PREFIX "incl %0"
 : "+m" (v->counter));
 }
 
 /**
- * atomic_dec - decrement atomic variable
+ * arch_atomic_dec - decrement atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically decrements @v by 1.
  */
-static __always_inline void atomic_dec(atomic_t *v)
+static __always_inline void arch_atomic_dec(atomic_t *v)
 {
asm volatile(LOCK_PREFIX "decl %0"
 : "+m" (v->counter));
 }
 
 /**
- * atomic_dec_and_test - decrement and test
+ * arch_atomic_dec_and_test - decrement and test
  * @v: pointer of type atomic_t
  *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
-static __always_inline bool atomic_dec_and_test(atomic_t *v)
+static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
 {