Author: aurel32
Date: 2015-10-30 10:30:59 +0000 (Fri, 30 Oct 2015)
New Revision: 6676

Added:
   glibc-package/branches/glibc-2.21/debian/patches/hppa/cvs-atomic.diff
   glibc-package/branches/glibc-2.21/debian/patches/hppa/cvs-inline-syscall.diff
Modified:
   glibc-package/branches/glibc-2.21/debian/changelog
   glibc-package/branches/glibc-2.21/debian/patches/series
Log:
patches/hppa/cvs-atomic.diff, patches/hppa/cvs-inline-syscall.diff: new
patches from upstream to improve atomic and inline syscalls on HPPA
(closes: #799478).

Modified: glibc-package/branches/glibc-2.21/debian/changelog
===================================================================
--- glibc-package/branches/glibc-2.21/debian/changelog  2015-10-30 10:12:57 UTC (rev 6675)
+++ glibc-package/branches/glibc-2.21/debian/changelog  2015-10-30 10:30:59 UTC (rev 6676)
@@ -27,6 +27,9 @@
     add missing Advanced API (RFC3542) (1) defines.  Closes: #753909.
   * debian/rules: don't put debug files from libc0.1-i386 and libc6-mips32
     into libc0.1-dbg or libc6-dbg.
+  * patches/hppa/cvs-atomic.diff, patches/hppa/cvs-inline-syscall.diff: new
+    patches from upstream to improve atomic and inline syscalls on HPPA
+    (closes: #799478).
 
   [ Samuel Thibault ]
   * patches/hurd-i386/tg-pagesize.diff: Refresh.

Added: glibc-package/branches/glibc-2.21/debian/patches/hppa/cvs-atomic.diff
===================================================================
--- glibc-package/branches/glibc-2.21/debian/patches/hppa/cvs-atomic.diff                             (rev 0)
+++ glibc-package/branches/glibc-2.21/debian/patches/hppa/cvs-atomic.diff       2015-10-30 10:30:59 UTC (rev 6676)
@@ -0,0 +1,71 @@
+2015-08-08  John David Anglin  <[email protected]>
+
+       [BZ #18787]
+       * sysdeps/unix/sysv/linux/hppa/bits/atomic.h (_LWS_CLOBBER): Revise
+       clobber registers.
+       (atomic_compare_and_exchange_val_acq): Use register asms to assign
+       operand registers.  Use register %r20 for EAGAIN and EDEADLOCK checks.
+       Cast return to __typeof (oldval).
+
+--- a/sysdeps/unix/sysv/linux/hppa/bits/atomic.h
++++ b/sysdeps/unix/sysv/linux/hppa/bits/atomic.h
+@@ -56,42 +56,41 @@ typedef uintmax_t uatomic_max_t;
+ #define _LWS "0xb0"
+ #define _LWS_CAS "0"
+ /* Note r31 is the link register.  */
+-#define _LWS_CLOBBER "r1", "r26", "r25", "r24", "r23", "r22", "r21", "r20", "r28", "r31", "memory"
++#define _LWS_CLOBBER "r1", "r23", "r22", "r20", "r31", "memory"
+ /* String constant for -EAGAIN.  */
+ #define _ASM_EAGAIN "-11"
+ /* String constant for -EDEADLOCK.  */
+ #define _ASM_EDEADLOCK "-45"
+ 
+ #if __ASSUME_LWS_CAS
+-/* The only basic operation needed is compare and exchange.  */
++/* The only basic operation needed is compare and exchange.  The mem
++   pointer must be word aligned.  */
+ # define atomic_compare_and_exchange_val_acq(mem, newval, oldval)     \
+   ({                                                                  \
+-     volatile int lws_errno;                                          \
+-     __typeof__ (*mem) lws_ret;                                       \
+-     asm volatile(                                                    \
++     register long lws_errno asm("r21");                              \
++     register unsigned long lws_ret asm("r28");                       \
++     register unsigned long lws_mem asm("r26") = (unsigned long)(mem);\
++     register unsigned long lws_old asm("r25") = (unsigned long)(oldval);\
++     register unsigned long lws_new asm("r24") = (unsigned long)(newval);\
++     __asm__ __volatile__(                                            \
+       "0:                                     \n\t"                   \
+-      "copy   %2, %%r26                       \n\t"                   \
+-      "copy   %3, %%r25                       \n\t"                   \
+-      "copy   %4, %%r24                       \n\t"                   \
+       "ble    " _LWS "(%%sr2, %%r0)           \n\t"                   \
+       "ldi    " _LWS_CAS ", %%r20             \n\t"                   \
+-      "ldi    " _ASM_EAGAIN ", %%r24          \n\t"                   \
+-      "cmpb,=,n %%r24, %%r21, 0b              \n\t"                   \
++      "ldi    " _ASM_EAGAIN ", %%r20          \n\t"                   \
++      "cmpb,=,n %%r20, %%r21, 0b              \n\t"                   \
+       "nop                                    \n\t"                   \
+-      "ldi    " _ASM_EDEADLOCK ", %%r25       \n\t"                   \
+-      "cmpb,=,n %%r25, %%r21, 0b              \n\t"                   \
++      "ldi    " _ASM_EDEADLOCK ", %%r20       \n\t"                   \
++      "cmpb,=,n %%r20, %%r21, 0b              \n\t"                   \
+       "nop                                    \n\t"                   \
+-      "stw    %%r28, %0                       \n\t"                   \
+-      "stw    %%r21, %1                       \n\t"                   \
+-      : "=m" (lws_ret), "=m" (lws_errno)                              \
+        : "r" (mem), "r" (oldval), "r" (newval)                        \
++      : "=r" (lws_ret), "=r" (lws_errno)                              \
++      : "r" (lws_mem), "r" (lws_old), "r" (lws_new)                   \
+       : _LWS_CLOBBER                                                  \
+      );                                                               \
+                                                                       \
+-     if(lws_errno == -EFAULT || lws_errno == -ENOSYS)                 \
++     if (lws_errno == -EFAULT || lws_errno == -ENOSYS)                \
+       ABORT_INSTRUCTION;                                              \
+                                                                       \
+-     lws_ret;                                                         \
++     (__typeof (oldval)) lws_ret;                                     \
+    })
+ 
+ # define atomic_compare_and_exchange_bool_acq(mem, newval, oldval)    \

Added: glibc-package/branches/glibc-2.21/debian/patches/hppa/cvs-inline-syscall.diff
===================================================================
--- glibc-package/branches/glibc-2.21/debian/patches/hppa/cvs-inline-syscall.diff                               (rev 0)
+++ glibc-package/branches/glibc-2.21/debian/patches/hppa/cvs-inline-syscall.diff       2015-10-30 10:30:59 UTC (rev 6676)
@@ -0,0 +1,116 @@
+2015-08-09  John David Anglin  <[email protected]>
+
+       [BZ #18480]
+       * sysdeps/unix/sysv/linux/hppa/sysdep.h (LOAD_ARGS_0, LOAD_ARGS_1,
+       LOAD_ARGS_2, LOAD_ARGS_3, LOAD_ARGS_4, LOAD_ARGS_5, LOAD_ARGS_6):
+       Define.
+       (LOAD_REGS_0, LOAD_REGS_1, LOAD_REGS_2, LOAD_REGS_3, LOAD_REGS_4,
+       LOAD_REGS_5, LOAD_REGS_6): Update.
+       (INTERNAL_SYSCALL): Update using new LOAD defines.
+       (INTERNAL_SYSCALL_NCS): Likewise.
+       * sysdeps/unix/sysv/linux/hppa/syscall.c (syscall): Likewise.
+
+--- a/sysdeps/unix/sysv/linux/hppa/syscall.c
++++ b/sysdeps/unix/sysv/linux/hppa/syscall.c
+@@ -43,9 +43,10 @@ syscall (long int __sysno, ...)
+   va_end (args);
+ 
+   {
++    LOAD_ARGS_6 (arg0, arg1, arg2, arg3, arg4, arg5)
+     register unsigned long int __res asm("r28");
+     PIC_REG_DEF
+-    LOAD_ARGS_6 (arg0, arg1, arg2, arg3, arg4, arg5)
++    LOAD_REGS_6
+     asm volatile (SAVE_ASM_PIC
+                 "     ble  0x100(%%sr2, %%r0) \n"
+                 "     copy %1, %%r20          \n"
+--- a/sysdeps/unix/sysv/linux/hppa/sysdep.h
++++ b/sysdeps/unix/sysv/linux/hppa/sysdep.h
+@@ -400,9 +400,10 @@ L(pre_end):                                       ASM_LINE_SEP    \
+ ({                                                                    \
+       long __sys_res;                                                 \
+       {                                                               \
++              LOAD_ARGS_##nr(args)                                    \
+               register unsigned long __res asm("r28");                \
+               PIC_REG_DEF                                             \
+-              LOAD_ARGS_##nr(args)                                    \
++              LOAD_REGS_##nr                                          \
+               /* FIXME: HACK save/load r19 around syscall */          \
+               asm volatile(                                           \
+                       SAVE_ASM_PIC                                    \
+@@ -425,9 +426,10 @@ L(pre_end):                                       ASM_LINE_SEP    \
+ ({                                                                    \
+       long __sys_res;                                                 \
+       {                                                               \
++              LOAD_ARGS_##nr(args)                                    \
+               register unsigned long __res asm("r28");                \
+               PIC_REG_DEF                                             \
+-              LOAD_ARGS_##nr(args)                                    \
++              LOAD_REGS_##nr                                          \
+               /* FIXME: HACK save/load r19 around syscall */          \
+               asm volatile(                                           \
+                       SAVE_ASM_PIC                                    \
+@@ -443,27 +445,44 @@ L(pre_end):                                      ASM_LINE_SEP    \
+       __sys_res;                                                      \
+  })
+ 
+-
+-
+ #define LOAD_ARGS_0()
+-#define LOAD_ARGS_1(r26)                                              \
+-  register unsigned long __r26 __asm__("r26") = (unsigned long)(r26); \
++#define LOAD_REGS_0
++#define LOAD_ARGS_1(a1)                                               \
++  register unsigned long __x26 = (unsigned long)(a1);                 \
+   LOAD_ARGS_0()
+-#define LOAD_ARGS_2(r26,r25)                                          \
+-  register unsigned long __r25 __asm__("r25") = (unsigned long)(r25); \
+-  LOAD_ARGS_1(r26)
+-#define LOAD_ARGS_3(r26,r25,r24)                                      \
+-  register unsigned long __r24 __asm__("r24") = (unsigned long)(r24); \
+-  LOAD_ARGS_2(r26,r25)
+-#define LOAD_ARGS_4(r26,r25,r24,r23)                                  \
+-  register unsigned long __r23 __asm__("r23") = (unsigned long)(r23); \
+-  LOAD_ARGS_3(r26,r25,r24)
+-#define LOAD_ARGS_5(r26,r25,r24,r23,r22)                              \
+-  register unsigned long __r22 __asm__("r22") = (unsigned long)(r22); \
+-  LOAD_ARGS_4(r26,r25,r24,r23)
+-#define LOAD_ARGS_6(r26,r25,r24,r23,r22,r21)                          \
+-  register unsigned long __r21 __asm__("r21") = (unsigned long)(r21); \
+-  LOAD_ARGS_5(r26,r25,r24,r23,r22)
++#define LOAD_REGS_1                                                   \
++  register unsigned long __r26 __asm__("r26") = __x26;                 \
++  LOAD_REGS_0
++#define LOAD_ARGS_2(a1,a2)                                            \
++  register unsigned long __x25 = (unsigned long)(a2);                 \
++  LOAD_ARGS_1(a1)
++#define LOAD_REGS_2                                                   \
++  register unsigned long __r25 __asm__("r25") = __x25;                 \
++  LOAD_REGS_1
++#define LOAD_ARGS_3(a1,a2,a3)                                         \
++  register unsigned long __x24 = (unsigned long)(a3);                 \
++  LOAD_ARGS_2(a1,a2)
++#define LOAD_REGS_3                                                   \
++  register unsigned long __r24 __asm__("r24") = __x24;                 \
++  LOAD_REGS_2
++#define LOAD_ARGS_4(a1,a2,a3,a4)                                      \
++  register unsigned long __x23 = (unsigned long)(a4);                 \
++  LOAD_ARGS_3(a1,a2,a3)
++#define LOAD_REGS_4                                                   \
++  register unsigned long __r23 __asm__("r23") = __x23;                 \
++  LOAD_REGS_3
++#define LOAD_ARGS_5(a1,a2,a3,a4,a5)                                   \
++  register unsigned long __x22 = (unsigned long)(a5);                 \
++  LOAD_ARGS_4(a1,a2,a3,a4)
++#define LOAD_REGS_5                                                   \
++  register unsigned long __r22 __asm__("r22") = __x22;                 \
++  LOAD_REGS_4
++#define LOAD_ARGS_6(a1,a2,a3,a4,a5,a6)                                \
++  register unsigned long __x21 = (unsigned long)(a6);                 \
++  LOAD_ARGS_5(a1,a2,a3,a4,a5)
++#define LOAD_REGS_6                                                   \
++  register unsigned long __r21 __asm__("r21") = __x21;                 \
++  LOAD_REGS_5
+ 
+ /* Even with zero args we use r20 for the syscall number */
+ #define ASM_ARGS_0

Modified: glibc-package/branches/glibc-2.21/debian/patches/series
===================================================================
--- glibc-package/branches/glibc-2.21/debian/patches/series     2015-10-30 10:12:57 UTC (rev 6675)
+++ glibc-package/branches/glibc-2.21/debian/patches/series     2015-10-30 10:30:59 UTC (rev 6676)
@@ -69,6 +69,8 @@
 hppa/local-setcontext.diff
 hppa/cvs-start.diff
 hppa/cvs-alloca-werror.diff
+hppa/cvs-atomic.diff
+hppa/cvs-inline-syscall.diff
 
 hurd-i386/local-enable-ldconfig.diff
 hurd-i386/tg-context_functions.diff

Reply via email to