Author: jhibbits
Date: Fri Mar  6 01:45:03 2020
New Revision: 358688
URL: https://svnweb.freebsd.org/changeset/base/358688

Log:
  powerpc/powerpc64: Enforce natural alignment in memcpy
  
  Summary:
  POWER architecture CPUs (Book-S) require natural alignment for
  cache-inhibited storage accesses.  Since we can't know the caching model
  for a page ahead of time, always enforce natural alignment in memcpy.
  This fixes a SIGBUS in X with acceleration enabled on POWER9.
  
  As part of this, revert r358672, it's no longer necessary with this fix.
  
  Regression tested by alfredo.
  
  Reviewed by: alfredo
  Differential Revision: https://reviews.freebsd.org/D23969

Modified:
  head/lib/libc/powerpc64/string/bcopy_resolver.c
  head/lib/libc/powerpc64/string/memcpy.S
  head/lib/libc/powerpc64/string/memcpy_vsx.S

Modified: head/lib/libc/powerpc64/string/bcopy_resolver.c
==============================================================================
--- head/lib/libc/powerpc64/string/bcopy_resolver.c     Thu Mar  5 22:45:16 2020        (r358687)
+++ head/lib/libc/powerpc64/string/bcopy_resolver.c     Fri Mar  6 01:45:03 2020        (r358688)
@@ -66,7 +66,7 @@ DEFINE_UIFUNC(, FN_RET, FN_NAME, FN_PARAMS)
         * Since POWER ISA 2.07B this is solved transparently
         * by the hardware
         */
-       if (cpu_features2 & PPC_FEATURE2_ARCH_2_07)
+       if (cpu_features & PPC_FEATURE_HAS_VSX)
                return (FN_NAME_VSX);
        else
                return (FN_NAME_NOVSX);

Modified: head/lib/libc/powerpc64/string/memcpy.S
==============================================================================
--- head/lib/libc/powerpc64/string/memcpy.S     Thu Mar  5 22:45:16 2020        (r358687)
+++ head/lib/libc/powerpc64/string/memcpy.S     Fri Mar  6 01:45:03 2020        (r358688)
@@ -39,6 +39,11 @@ WEAK_REFERENCE(__memcpy, memcpy);
 #define BLOCK_BYTES                    (1 << BLOCK_BITS)
 #define BLOCK_MASK                     (BLOCK_BYTES - 1)
 
+/* Minimum 8 byte alignment, to avoid cache-inhibited alignment faults. */
+#ifndef ALIGN_MASK
+#define        ALIGN_MASK                      0x7
+#endif
+
 /*
  * r3: dst
  * r4: src
@@ -48,6 +53,12 @@ ENTRY(FN_NAME)
        cmpdi   %r5, 0                  /* len == 0? nothing to do */
        beqlr-
 
+       /* If src and dst are relatively misaligned, do byte copies. */
+       andi.   %r8, %r3, ALIGN_MASK
+       andi.   %r7, %r4, ALIGN_MASK
+       cmpd    %r8, %r7
+       mr      %r7, %r5
+       bne+    .Lcopy_remaining_fix_index_byte
        mr      %r8, %r3                /* save dst */
 
        /* align src */

Modified: head/lib/libc/powerpc64/string/memcpy_vsx.S
==============================================================================
--- head/lib/libc/powerpc64/string/memcpy_vsx.S Thu Mar  5 22:45:16 2020        (r358687)
+++ head/lib/libc/powerpc64/string/memcpy_vsx.S Fri Mar  6 01:45:03 2020        (r358688)
@@ -30,6 +30,7 @@
 
 #define FN_NAME                        __memcpy_vsx
 #define BLOCK_BITS             6
+#define        ALIGN_MASK              0xf
 
 /*
  * r5: bytes to copy (multiple of BLOCK_BYTES)
_______________________________________________
svn-src-head@freebsd.org mailing list
https://lists.freebsd.org/mailman/listinfo/svn-src-head
To unsubscribe, send any mail to "svn-src-head-unsubscribe@freebsd.org"

Reply via email to