From: Andi Kleen <a...@linux.intel.com>

__atomic_add_unless() is fairly large and frequently used, so inlining it
everywhere is expensive in code size. The extra call is unlikely to make a
measurable difference for such a complex function that already performs an
expensive atomic operation. So move it out of line.

This saves around 12k of text (9096494 - 9084246 = 12248 bytes):

   text    data     bss     dec     hex filename
9084246 5367600 11116544        25568390        1862486 vmlinux-atomic-add
9096494 5367568 11116544        25580606        186543e vmlinux-before-atomic-add
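
For context, callers normally reach this function through the generic
atomic_add_unless()/atomic_inc_not_zero() wrappers in <linux/atomic.h>,
which are unchanged by this patch. A minimal sketch of that wrapper layer
(for illustration only, not part of this diff):

   /* Sketch of the generic <linux/atomic.h> wrapper: returns non-zero
    * iff @v was not @u, i.e. the add actually happened. */
   static inline int atomic_add_unless(atomic_t *v, int a, int u)
   {
           return __atomic_add_unless(v, a, u) != u;
   }

   /* Typical refcount-style user: */
   #define atomic_inc_not_zero(v)  atomic_add_unless((v), 1, 0)

Since every such call site previously expanded the full cmpxchg loop
inline, out-of-lining the helper shrinks each of them to a single call.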

Cc: x...@kernel.org
Signed-off-by: Andi Kleen <a...@linux.intel.com>
---
 arch/x86/include/asm/atomic.h | 24 +-----------------------
 arch/x86/lib/Makefile         |  1 +
 arch/x86/lib/atomic.c         | 27 +++++++++++++++++++++++++++
 3 files changed, 29 insertions(+), 23 deletions(-)
 create mode 100644 arch/x86/lib/atomic.c

diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 14635c5ea025..069d69712275 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -225,29 +225,7 @@ ATOMIC_OPS(xor, ^)
 #undef ATOMIC_FETCH_OP
 #undef ATOMIC_OP
 
-/**
- * __atomic_add_unless - add unless the number is already a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as @v was not already @u.
- * Returns the old value of @v.
- */
-static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
-{
-       int c, old;
-       c = atomic_read(v);
-       for (;;) {
-               if (unlikely(c == (u)))
-                       break;
-               old = atomic_cmpxchg((v), c, c + (a));
-               if (likely(old == c))
-                       break;
-               c = old;
-       }
-       return c;
-}
+int __atomic_add_unless(atomic_t *v, int a, int u);
 
 /**
  * atomic_inc_short - increment of a short integer
diff --git a/arch/x86/lib/Makefile b/arch/x86/lib/Makefile
index 34a74131a12c..81303cefa9f4 100644
--- a/arch/x86/lib/Makefile
+++ b/arch/x86/lib/Makefile
@@ -25,6 +25,7 @@ lib-y += memcpy_$(BITS).o
 lib-$(CONFIG_RWSEM_XCHGADD_ALGORITHM) += rwsem.o
 lib-$(CONFIG_INSTRUCTION_DECODER) += insn.o inat.o
 lib-$(CONFIG_RANDOMIZE_BASE) += kaslr.o
+lib-y += atomic.o
 
 obj-y += msr.o msr-reg.o msr-reg-export.o hweight.o
 
diff --git a/arch/x86/lib/atomic.c b/arch/x86/lib/atomic.c
new file mode 100644
index 000000000000..dde7ddf67698
--- /dev/null
+++ b/arch/x86/lib/atomic.c
@@ -0,0 +1,27 @@
+#include <linux/module.h>
+#include <asm/atomic.h>
+
+/**
+ * __atomic_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as @v was not already @u.
+ * Returns the old value of @v.
+ */
+int __atomic_add_unless(atomic_t *v, int a, int u)
+{
+       int c, old;
+       c = atomic_read(v);
+       for (;;) {
+               if (unlikely(c == (u)))
+                       break;
+               old = atomic_cmpxchg((v), c, c + (a));
+               if (likely(old == c))
+                       break;
+               c = old;
+       }
+       return c;
+}
+EXPORT_SYMBOL(__atomic_add_unless);
-- 
2.9.3
