PR83399 shows a problem where we emit an altivec load using a builtin
that forces us to use a specific altivec load pattern.  The generated
rtl pattern has a use of sfp (frame pointer) and during LRA, we eliminate
its use to the sp (lra-eliminations.c:process_insn_for_elimination).
During this process, we re-recog the insn and end up matching a different
vsx pattern, because it exists earlier in the machine description file.
That vsx pattern uses a "Z" constraint for its address operand, which will
not accept the "special" altivec address we have, but the memory_operand
predicate the pattern uses allows it.  Recog'ing to a different pattern
than the one we want causes us to ICE later on.

The solution here is to tighten the predicate used for the address in the
vsx pattern to use the indexed_or_indirect_operand instead, which will
reject the altivec address our rtl pattern has.

Once this is fixed, we end up hitting another issue in print_operand when
outputting altivec addresses when using -mvsx.  This was fixed by allowing
either ALTIVEC or VSX VECTOR MEMs.

This passed bootstrap and regtesting on powerpc64le-linux with no regressions.
Ok for trunk?

Is this ok for the open release branches too, once testing has completed there?

Peter


gcc/
        PR target/83399
        * config/rs6000/rs6000.c (print_operand): Use
        VECTOR_MEM_ALTIVEC_OR_VSX_P.
        * config/rs6000/vsx.md (*vsx_le_perm_load_<mode><VSX_D>): Use
        indexed_or_indirect_operand predicate.
        (*vsx_le_perm_load_<mode><VSX_W>): Likewise.
        (*vsx_le_perm_load_v8hi): Likewise.
        (*vsx_le_perm_load_v16qi): Likewise.
        (*vsx_le_perm_store_<mode><VSX_D>): Likewise in pattern and splitters.
        (*vsx_le_perm_store_<mode><VSX_W>): Likewise.
        (*vsx_le_perm_store_v8hi): Likewise.
        (*vsx_le_perm_store_v16qi): Likewise.

gcc/testsuite/
        PR target/83399
        * gcc.target/powerpc/pr83399.c: New test.

Index: gcc/config/rs6000/rs6000.c
===================================================================
--- gcc/config/rs6000/rs6000.c  (revision 256351)
+++ gcc/config/rs6000/rs6000.c  (working copy)
@@ -21671,7 +21671,7 @@ print_operand (FILE *file, rtx x, int co
 
        tmp = XEXP (x, 0);
 
-       if (VECTOR_MEM_ALTIVEC_P (GET_MODE (x))
+       if (VECTOR_MEM_ALTIVEC_OR_VSX_P (GET_MODE (x))
            && GET_CODE (tmp) == AND
            && GET_CODE (XEXP (tmp, 1)) == CONST_INT
            && INTVAL (XEXP (tmp, 1)) == -16)
Index: gcc/config/rs6000/vsx.md
===================================================================
--- gcc/config/rs6000/vsx.md    (revision 256351)
+++ gcc/config/rs6000/vsx.md    (working copy)
@@ -430,7 +430,7 @@ (define_c_enum "unspec"
 ;; VSX moves so they match first.
 (define_insn_and_split "*vsx_le_perm_load_<mode>"
   [(set (match_operand:VSX_D 0 "vsx_register_operand" "=<VSa>")
-        (match_operand:VSX_D 1 "memory_operand" "Z"))]
+        (match_operand:VSX_D 1 "indexed_or_indirect_operand" "Z"))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
   "#"
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
@@ -453,7 +453,7 @@ (define_insn_and_split "*vsx_le_perm_loa
 
 (define_insn_and_split "*vsx_le_perm_load_<mode>"
   [(set (match_operand:VSX_W 0 "vsx_register_operand" "=<VSa>")
-        (match_operand:VSX_W 1 "memory_operand" "Z"))]
+        (match_operand:VSX_W 1 "indexed_or_indirect_operand" "Z"))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
   "#"
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
@@ -478,7 +478,7 @@ (define_insn_and_split "*vsx_le_perm_loa
 
 (define_insn_and_split "*vsx_le_perm_load_v8hi"
   [(set (match_operand:V8HI 0 "vsx_register_operand" "=wa")
-        (match_operand:V8HI 1 "memory_operand" "Z"))]
+        (match_operand:V8HI 1 "indexed_or_indirect_operand" "Z"))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
   "#"
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
@@ -507,7 +507,7 @@ (define_insn_and_split "*vsx_le_perm_loa
 
 (define_insn_and_split "*vsx_le_perm_load_v16qi"
   [(set (match_operand:V16QI 0 "vsx_register_operand" "=wa")
-        (match_operand:V16QI 1 "memory_operand" "Z"))]
+        (match_operand:V16QI 1 "indexed_or_indirect_operand" "Z"))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
   "#"
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
@@ -543,7 +543,7 @@ (define_insn_and_split "*vsx_le_perm_loa
    (set_attr "length" "8")])
 
 (define_insn "*vsx_le_perm_store_<mode>"
-  [(set (match_operand:VSX_D 0 "memory_operand" "=Z")
+  [(set (match_operand:VSX_D 0 "indexed_or_indirect_operand" "=Z")
         (match_operand:VSX_D 1 "vsx_register_operand" "+<VSa>"))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
   "#"
@@ -551,7 +551,7 @@ (define_insn "*vsx_le_perm_store_<mode>"
    (set_attr "length" "12")])
 
 (define_split
-  [(set (match_operand:VSX_D 0 "memory_operand" "")
+  [(set (match_operand:VSX_D 0 "indexed_or_indirect_operand" "")
         (match_operand:VSX_D 1 "vsx_register_operand" ""))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR && !reload_completed"
   [(set (match_dup 2)
@@ -570,7 +570,7 @@ (define_split
 ;; The post-reload split requires that we re-permute the source
 ;; register in case it is still live.
 (define_split
-  [(set (match_operand:VSX_D 0 "memory_operand" "")
+  [(set (match_operand:VSX_D 0 "indexed_or_indirect_operand" "")
         (match_operand:VSX_D 1 "vsx_register_operand" ""))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR && reload_completed"
   [(set (match_dup 1)
@@ -588,7 +588,7 @@ (define_split
   "")
 
 (define_insn "*vsx_le_perm_store_<mode>"
-  [(set (match_operand:VSX_W 0 "memory_operand" "=Z")
+  [(set (match_operand:VSX_W 0 "indexed_or_indirect_operand" "=Z")
         (match_operand:VSX_W 1 "vsx_register_operand" "+<VSa>"))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
   "#"
@@ -596,7 +596,7 @@ (define_insn "*vsx_le_perm_store_<mode>"
    (set_attr "length" "12")])
 
 (define_split
-  [(set (match_operand:VSX_W 0 "memory_operand" "")
+  [(set (match_operand:VSX_W 0 "indexed_or_indirect_operand" "")
         (match_operand:VSX_W 1 "vsx_register_operand" ""))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR && !reload_completed"
   [(set (match_dup 2)
@@ -617,7 +617,7 @@ (define_split
 ;; The post-reload split requires that we re-permute the source
 ;; register in case it is still live.
 (define_split
-  [(set (match_operand:VSX_W 0 "memory_operand" "")
+  [(set (match_operand:VSX_W 0 "indexed_or_indirect_operand" "")
         (match_operand:VSX_W 1 "vsx_register_operand" ""))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR && reload_completed"
   [(set (match_dup 1)
@@ -638,7 +638,7 @@ (define_split
   "")
 
 (define_insn "*vsx_le_perm_store_v8hi"
-  [(set (match_operand:V8HI 0 "memory_operand" "=Z")
+  [(set (match_operand:V8HI 0 "indexed_or_indirect_operand" "=Z")
         (match_operand:V8HI 1 "vsx_register_operand" "+wa"))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
   "#"
@@ -646,7 +646,7 @@ (define_insn "*vsx_le_perm_store_v8hi"
    (set_attr "length" "12")])
 
 (define_split
-  [(set (match_operand:V8HI 0 "memory_operand" "")
+  [(set (match_operand:V8HI 0 "indexed_or_indirect_operand" "")
         (match_operand:V8HI 1 "vsx_register_operand" ""))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR && !reload_completed"
   [(set (match_dup 2)
@@ -671,7 +671,7 @@ (define_split
 ;; The post-reload split requires that we re-permute the source
 ;; register in case it is still live.
 (define_split
-  [(set (match_operand:V8HI 0 "memory_operand" "")
+  [(set (match_operand:V8HI 0 "indexed_or_indirect_operand" "")
         (match_operand:V8HI 1 "vsx_register_operand" ""))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR && reload_completed"
   [(set (match_dup 1)
@@ -698,7 +698,7 @@ (define_split
   "")
 
 (define_insn "*vsx_le_perm_store_v16qi"
-  [(set (match_operand:V16QI 0 "memory_operand" "=Z")
+  [(set (match_operand:V16QI 0 "indexed_or_indirect_operand" "=Z")
         (match_operand:V16QI 1 "vsx_register_operand" "+wa"))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR"
   "#"
@@ -706,7 +706,7 @@ (define_insn "*vsx_le_perm_store_v16qi"
    (set_attr "length" "12")])
 
 (define_split
-  [(set (match_operand:V16QI 0 "memory_operand" "")
+  [(set (match_operand:V16QI 0 "indexed_or_indirect_operand" "")
         (match_operand:V16QI 1 "vsx_register_operand" ""))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR && !reload_completed"
   [(set (match_dup 2)
@@ -739,7 +739,7 @@ (define_split
 ;; The post-reload split requires that we re-permute the source
 ;; register in case it is still live.
 (define_split
-  [(set (match_operand:V16QI 0 "memory_operand" "")
+  [(set (match_operand:V16QI 0 "indexed_or_indirect_operand" "")
         (match_operand:V16QI 1 "vsx_register_operand" ""))]
   "!BYTES_BIG_ENDIAN && TARGET_VSX && !TARGET_P9_VECTOR && reload_completed"
   [(set (match_dup 1)
Index: gcc/testsuite/gcc.target/powerpc/pr83399.c
===================================================================
--- gcc/testsuite/gcc.target/powerpc/pr83399.c  (nonexistent)
+++ gcc/testsuite/gcc.target/powerpc/pr83399.c  (working copy)
@@ -0,0 +1,15 @@
+/* PR target/83399 */
+/* { dg-do compile } */
+/* { dg-require-effective-target powerpc_vsx_ok } */
+/* { dg-options "-O1 -mabi=elfv2 -mlittle -mvsx" } */
+
+typedef __attribute__((altivec(vector__))) int v4si_t;
+int
+foo (void)
+{
+  v4si_t a, u, v, y;
+  u = __builtin_altivec_lvx (32, ((void *) &a) - 32);
+  v = __builtin_altivec_lvx (64, ((void *) &a) - 32);
+  y = u + v;
+  return y[0];
+}

Reply via email to