I've committed the following to master, and backported to gcc-15 and gcc-14
after removing the mf8 and sve2 testsuite parts respectively.

---

An architectural relaxation in 2024 (listed in the "Known issues in
Issue K.a" section of the Arm ARM) removed support for prefixing the
revd instruction with a movprfx instruction.  This patch removes this
(now invalid) codegen.

The patch also makes identical changes to each revd* testsuite file.
Interestingly, this change actually improves the generated code for one
of the three functions.


gcc/ChangeLog:

        * config/aarch64/aarch64-sve2.md (@aarch64_pred_revd<mode>):
        Remove movprfx alternative.
        (@cond_revd<mode>): Likewise.

gcc/testsuite/ChangeLog:

        * gcc.target/aarch64/sme/acle-asm/revd_bf16.c: Update checks.
        * gcc.target/aarch64/sme/acle-asm/revd_f16.c: Likewise.
        * gcc.target/aarch64/sme/acle-asm/revd_f32.c: Likewise.
        * gcc.target/aarch64/sme/acle-asm/revd_f64.c: Likewise.
        * gcc.target/aarch64/sme/acle-asm/revd_mf8.c: Likewise.
        * gcc.target/aarch64/sme/acle-asm/revd_s16.c: Likewise.
        * gcc.target/aarch64/sme/acle-asm/revd_s32.c: Likewise.
        * gcc.target/aarch64/sme/acle-asm/revd_s64.c: Likewise.
        * gcc.target/aarch64/sme/acle-asm/revd_s8.c: Likewise.
        * gcc.target/aarch64/sme/acle-asm/revd_u16.c: Likewise.
        * gcc.target/aarch64/sme/acle-asm/revd_u32.c: Likewise.
        * gcc.target/aarch64/sme/acle-asm/revd_u64.c: Likewise.
        * gcc.target/aarch64/sme/acle-asm/revd_u8.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_bf16.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_f16.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_f32.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_f64.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_mf8.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_s16.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_s32.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_s64.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_s8.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_u16.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_u32.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_u64.c: Likewise.
        * gcc.target/aarch64/sve2/acle/asm/revd_u8.c: Likewise.


diff --git a/gcc/config/aarch64/aarch64-sve2.md 
b/gcc/config/aarch64/aarch64-sve2.md
index 
127754bb951b722c50a56d5cb87a9223a4b52db0..4ceb8c784d93d95282eff197d24765b2459240c5
 100644
--- a/gcc/config/aarch64/aarch64-sve2.md
+++ b/gcc/config/aarch64/aarch64-sve2.md
@@ -4129,9 +4129,8 @@
             UNSPEC_REVD_ONLY)]
          UNSPEC_PRED_X))]
   "TARGET_SVE2p1_OR_SME"
-  {@ [ cons: =0 , 1   , 2 ; attrs: movprfx ]
-     [ w        , Upl , 0 ; *              ] revd\t%0.q, %1/m, %2.q
-     [ ?&w      , Upl , w ; yes            ] movprfx\t%0, %2\;revd\t%0.q, 
%1/m, %2.q
+  {@ [ cons: =0 , 1   , 2 ]
+     [ w        , Upl , 0 ] revd\t%0.q, %1/m, %2.q
   }
   [(set_attr "sve_type" "sve_int_general")]
 )
@@ -4146,9 +4145,8 @@
           (match_operand:SVE_FULL 3 "register_operand")]
          UNSPEC_SEL))]
   "TARGET_SVE2p1_OR_SME"
-  {@ [ cons: =0 , 1   , 2 , 3  ; attrs: movprfx ]
-     [ w        , Upl , w , 0  ; *              ] revd\t%0.q, %1/m, %2.q
-     [ ?&w      , Upl , w , w  ; yes            ] movprfx\t%0, %3\;revd\t%0.q, 
%1/m, %2.q
+  {@ [ cons: =0 , 1   , 2 , 3 ]
+     [ w        , Upl , w , 0 ] revd\t%0.q, %1/m, %2.q
   }
   [(set_attr "sve_type" "sve_int_general")]
 )
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_bf16.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_bf16.c
index 
c8f0dc5f02b1eb1a878efb2090b403f05b6f72f5..fce67e77ef6016bc4819116f2aabfb713935a644
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_bf16.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_bf16.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_bf16_m_tied1, svbfloat16_t,
 
 /*
 ** revd_bf16_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_bf16_m_tied2, svbfloat16_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_bf16_m_tied2, svbfloat16_t,
 
 /*
 ** revd_bf16_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_bf16_m_untied, svbfloat16_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_bf16_x_tied1, svbfloat16_t,
 
 /*
 ** revd_bf16_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_bf16_x_untied, svbfloat16_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f16.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f16.c
index 
a02e2d47f2a33bfd652058f5bd80a30df1bd391a..3df01eb1030e90ec7a757e5ead97259d23c068ab
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f16.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f16.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_f16_m_tied1, svfloat16_t,
 
 /*
 ** revd_f16_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f16_m_tied2, svfloat16_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_f16_m_tied2, svfloat16_t,
 
 /*
 ** revd_f16_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f16_m_untied, svfloat16_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_f16_x_tied1, svfloat16_t,
 
 /*
 ** revd_f16_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f16_x_untied, svfloat16_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f32.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f32.c
index 
28cdba50a89121eaa6d7f28cfb3b88f7541367c2..40cc7f94cef2d45c871befd1d6115854aeaf55a9
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f32.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f32.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_f32_m_tied1, svfloat32_t,
 
 /*
 ** revd_f32_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f32_m_tied2, svfloat32_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_f32_m_tied2, svfloat32_t,
 
 /*
 ** revd_f32_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f32_m_untied, svfloat32_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_f32_x_tied1, svfloat32_t,
 
 /*
 ** revd_f32_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f32_x_untied, svfloat32_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f64.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f64.c
index 
3f949b87e1c809919753a2d4de55696d44b62978..b91982c761f2a4d4b80d80c4b8614dd98595ce50
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f64.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_f64.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_f64_m_tied1, svfloat64_t,
 
 /*
 ** revd_f64_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f64_m_tied2, svfloat64_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_f64_m_tied2, svfloat64_t,
 
 /*
 ** revd_f64_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f64_m_untied, svfloat64_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_f64_x_tied1, svfloat64_t,
 
 /*
 ** revd_f64_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f64_x_untied, svfloat64_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_mf8.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_mf8.c
index 
611714b539bdb6a855af65c17475f1859e92305a..a6d58e2be316e0d2a380cb7ac0e9df91309e1c61
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_mf8.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_mf8.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_mf8_m_tied1, svmfloat8_t,
 
 /*
 ** revd_mf8_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_mf8_m_tied2, svmfloat8_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_mf8_m_tied2, svmfloat8_t,
 
 /*
 ** revd_mf8_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_mf8_m_untied, svmfloat8_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_mf8_x_tied1, svmfloat8_t,
 
 /*
 ** revd_mf8_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_mf8_x_untied, svmfloat8_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s16.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s16.c
index 
621ae022592e31fde8b43b9081996c1ba61b89c8..cdf27aad4b05c6dc7ed85144b698eac31d732fc3
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s16.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s16.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_s16_m_tied1, svint16_t,
 
 /*
 ** revd_s16_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s16_m_tied2, svint16_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_s16_m_tied2, svint16_t,
 
 /*
 ** revd_s16_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s16_m_untied, svint16_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_s16_x_tied1, svint16_t,
 
 /*
 ** revd_s16_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s16_x_untied, svint16_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s32.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s32.c
index 
d388108a8c9a37949a3edf689a2b43416d7f1944..33a52291a7afc17f5b35f9a00d3f62a54de2ed98
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s32.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s32.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_s32_m_tied1, svint32_t,
 
 /*
 ** revd_s32_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s32_m_tied2, svint32_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_s32_m_tied2, svint32_t,
 
 /*
 ** revd_s32_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s32_m_untied, svint32_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_s32_x_tied1, svint32_t,
 
 /*
 ** revd_s32_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s32_x_untied, svint32_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s64.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s64.c
index 
f5dbbaa44228494f504f0c37e3d5f179c015147e..5d0bd19512ac60cdf4f8e95f409a1e8077bc66c6
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s64.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s64.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_s64_m_tied1, svint64_t,
 
 /*
 ** revd_s64_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s64_m_tied2, svint64_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_s64_m_tied2, svint64_t,
 
 /*
 ** revd_s64_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s64_m_untied, svint64_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_s64_x_tied1, svint64_t,
 
 /*
 ** revd_s64_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s64_x_untied, svint64_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s8.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s8.c
index 
663cd418b16cd07356bc4442f3243206d995b0ae..b4557926fa09353744ba62a9c2c8d933c1936201
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s8.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_s8.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_s8_m_tied1, svint8_t,
 
 /*
 ** revd_s8_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s8_m_tied2, svint8_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_s8_m_tied2, svint8_t,
 
 /*
 ** revd_s8_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s8_m_untied, svint8_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_s8_x_tied1, svint8_t,
 
 /*
 ** revd_s8_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s8_x_untied, svint8_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u16.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u16.c
index 
83a414d020d3169a9e635cfb5100652b134487dc..b4b6a149845cb8e70edfbda5eb59776bed8b8c5f
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u16.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u16.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_u16_m_tied1, svuint16_t,
 
 /*
 ** revd_u16_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u16_m_tied2, svuint16_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_u16_m_tied2, svuint16_t,
 
 /*
 ** revd_u16_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u16_m_untied, svuint16_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_u16_x_tied1, svuint16_t,
 
 /*
 ** revd_u16_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u16_x_untied, svuint16_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u32.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u32.c
index 
5ba00cb806532d84164bc0d5ca1b4feb48c03b2b..6066600b87327826854e235766ba6d3074fb0856
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u32.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u32.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_u32_m_tied1, svuint32_t,
 
 /*
 ** revd_u32_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u32_m_tied2, svuint32_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_u32_m_tied2, svuint32_t,
 
 /*
 ** revd_u32_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u32_m_untied, svuint32_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_u32_x_tied1, svuint32_t,
 
 /*
 ** revd_u32_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u32_x_untied, svuint32_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u64.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u64.c
index 
0c016c7c398c9c3e82619b2f6e56715aba0679d7..1f7915e739ed1657cc39be039bc4c4a57215d0b9
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u64.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u64.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_u64_m_tied1, svuint64_t,
 
 /*
 ** revd_u64_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u64_m_tied2, svuint64_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_u64_m_tied2, svuint64_t,
 
 /*
 ** revd_u64_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u64_m_untied, svuint64_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_u64_x_tied1, svuint64_t,
 
 /*
 ** revd_u64_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u64_x_untied, svuint64_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u8.c 
b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u8.c
index 
b9f5935873ac94b9441f3496e29f19153c3c3ea4..d2cf360d0ee9879affb5892ce6223948e257ebe3
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u8.c
+++ b/gcc/testsuite/gcc.target/aarch64/sme/acle-asm/revd_u8.c
@@ -22,9 +22,8 @@ TEST_UNIFORM_Z (revd_u8_m_tied1, svuint8_t,
 
 /*
 ** revd_u8_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u8_m_tied2, svuint8_t,
@@ -33,8 +32,8 @@ TEST_UNIFORM_Z (revd_u8_m_tied2, svuint8_t,
 
 /*
 ** revd_u8_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u8_m_untied, svuint8_t,
@@ -67,8 +66,8 @@ TEST_UNIFORM_Z (revd_u8_x_tied1, svuint8_t,
 
 /*
 ** revd_u8_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u8_x_untied, svuint8_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_bf16.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_bf16.c
index 
93d60cbe7d145376246cc2d5897bc71690bf6e44..01fd949043322078c81668d33bcaae98bb08edb5
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_bf16.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_bf16.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_bf16_m_tied1, svbfloat16_t,
 
 /*
 ** revd_bf16_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_bf16_m_tied2, svbfloat16_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_bf16_m_tied2, svbfloat16_t,
 
 /*
 ** revd_bf16_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_bf16_m_untied, svbfloat16_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_bf16_x_tied1, svbfloat16_t,
 
 /*
 ** revd_bf16_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_bf16_x_untied, svbfloat16_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f16.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f16.c
index 
b8a5f7b8e7726c17bae85510bc41b98eaefd863d..ac2acf36ced232b4e312bf620609ee1b78859c30
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f16.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f16.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_f16_m_tied1, svfloat16_t,
 
 /*
 ** revd_f16_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f16_m_tied2, svfloat16_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_f16_m_tied2, svfloat16_t,
 
 /*
 ** revd_f16_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f16_m_untied, svfloat16_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_f16_x_tied1, svfloat16_t,
 
 /*
 ** revd_f16_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f16_x_untied, svfloat16_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f32.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f32.c
index 
d89eb5f5e39e5c0205daeddfcc359cc725bc11fe..03134374a410a8a40768168b4a5603cd419e49fe
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f32.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f32.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_f32_m_tied1, svfloat32_t,
 
 /*
 ** revd_f32_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f32_m_tied2, svfloat32_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_f32_m_tied2, svfloat32_t,
 
 /*
 ** revd_f32_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f32_m_untied, svfloat32_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_f32_x_tied1, svfloat32_t,
 
 /*
 ** revd_f32_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f32_x_untied, svfloat32_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f64.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f64.c
index 
0d80375a86056a6019a972f428d497d560792423..a1499a675b95148ed74d7bf162703ca7cd7e3ef0
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f64.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_f64.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_f64_m_tied1, svfloat64_t,
 
 /*
 ** revd_f64_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f64_m_tied2, svfloat64_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_f64_m_tied2, svfloat64_t,
 
 /*
 ** revd_f64_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f64_m_untied, svfloat64_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_f64_x_tied1, svfloat64_t,
 
 /*
 ** revd_f64_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_f64_x_untied, svfloat64_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_mf8.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_mf8.c
index 
64d08509c16d7fc95ce755eab70da47fb407810a..c5409ddfbe7d9c902987481e5cf110c324fee460
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_mf8.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_mf8.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_mf8_m_tied1, svmfloat8_t,
 
 /*
 ** revd_mf8_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_mf8_m_tied2, svmfloat8_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_mf8_m_tied2, svmfloat8_t,
 
 /*
 ** revd_mf8_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_mf8_m_untied, svmfloat8_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_mf8_x_tied1, svmfloat8_t,
 
 /*
 ** revd_mf8_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_mf8_x_untied, svmfloat8_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s16.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s16.c
index 
e12690461bbadd2d8b5238dbaa6d354109337383..db3a660830cfd02d83505a382761f7568ce07a30
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s16.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s16.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_s16_m_tied1, svint16_t,
 
 /*
 ** revd_s16_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s16_m_tied2, svint16_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_s16_m_tied2, svint16_t,
 
 /*
 ** revd_s16_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s16_m_untied, svint16_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_s16_x_tied1, svint16_t,
 
 /*
 ** revd_s16_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s16_x_untied, svint16_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s32.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s32.c
index 
86bb08de84910774a2a6a991dd3c0cdf12075bf5..e4c00d6869ed8150220798b7159fe2854c595487
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s32.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s32.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_s32_m_tied1, svint32_t,
 
 /*
 ** revd_s32_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s32_m_tied2, svint32_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_s32_m_tied2, svint32_t,
 
 /*
 ** revd_s32_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s32_m_untied, svint32_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_s32_x_tied1, svint32_t,
 
 /*
 ** revd_s32_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s32_x_untied, svint32_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s64.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s64.c
index 
e6bcab893b713f87685d3bbd0189e4beb3f18f9c..37fc77933ef8724b92f4a4dc42503fef2e86f24a
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s64.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s64.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_s64_m_tied1, svint64_t,
 
 /*
 ** revd_s64_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s64_m_tied2, svint64_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_s64_m_tied2, svint64_t,
 
 /*
 ** revd_s64_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s64_m_untied, svint64_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_s64_x_tied1, svint64_t,
 
 /*
 ** revd_s64_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s64_x_untied, svint64_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s8.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s8.c
index 
07925ed783b6239f9286f1856bb6eda63a04ba10..dba6197a59e07fde797005784e1abe996836ad3e
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s8.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_s8.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_s8_m_tied1, svint8_t,
 
 /*
 ** revd_s8_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s8_m_tied2, svint8_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_s8_m_tied2, svint8_t,
 
 /*
 ** revd_s8_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s8_m_untied, svint8_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_s8_x_tied1, svint8_t,
 
 /*
 ** revd_s8_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_s8_x_untied, svint8_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u16.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u16.c
index 
0617a3db9ecac21625be95cda05d345c359c6ac9..e93bf5b257e91f0908caf852d8bab3cf63b85acb
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u16.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u16.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_u16_m_tied1, svuint16_t,
 
 /*
 ** revd_u16_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u16_m_tied2, svuint16_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_u16_m_tied2, svuint16_t,
 
 /*
 ** revd_u16_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u16_m_untied, svuint16_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_u16_x_tied1, svuint16_t,
 
 /*
 ** revd_u16_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u16_x_untied, svuint16_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u32.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u32.c
index 
2de978e3070f575032ff077afaa0bb12fc09057e..a846359aeca6e2b5a8876ac6c44d9cb3e2d8eadb
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u32.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u32.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_u32_m_tied1, svuint32_t,
 
 /*
 ** revd_u32_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u32_m_tied2, svuint32_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_u32_m_tied2, svuint32_t,
 
 /*
 ** revd_u32_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u32_m_untied, svuint32_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_u32_x_tied1, svuint32_t,
 
 /*
 ** revd_u32_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u32_x_untied, svuint32_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u64.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u64.c
index 
112d381d9d838dd3be171b9f21c3e587cfff5100..729d8fa7dbda90034bfd1d4f0ee1ee8bdcf68693
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u64.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u64.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_u64_m_tied1, svuint64_t,
 
 /*
 ** revd_u64_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u64_m_tied2, svuint64_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_u64_m_tied2, svuint64_t,
 
 /*
 ** revd_u64_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u64_m_untied, svuint64_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_u64_x_tied1, svuint64_t,
 
 /*
 ** revd_u64_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u64_x_untied, svuint64_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u8.c 
b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u8.c
index 
5f29eecc0242cc6676f4b9fdf682fadea2add8ac..7d08d577bdc889dd01ee71e0d39c1e89f6f500b8
 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u8.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/revd_u8.c
@@ -26,9 +26,8 @@ TEST_UNIFORM_Z (revd_u8_m_tied1, svuint8_t,
 
 /*
 ** revd_u8_m_tied2:
-**     mov     (z[0-9]+)\.d, z0\.d
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, \1\.q
+**     revd    z1\.q, p0/m, z0\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u8_m_tied2, svuint8_t,
@@ -37,8 +36,8 @@ TEST_UNIFORM_Z (revd_u8_m_tied2, svuint8_t,
 
 /*
 ** revd_u8_m_untied:
-**     movprfx z0, z2
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z2\.q, p0/m, z1\.q
+**     mov     z0.d, z2.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u8_m_untied, svuint8_t,
@@ -71,8 +70,8 @@ TEST_UNIFORM_Z (revd_u8_x_tied1, svuint8_t,
 
 /*
 ** revd_u8_x_untied:
-**     movprfx z0, z1
-**     revd    z0\.q, p0/m, z1\.q
+**     revd    z1\.q, p0/m, z1\.q
+**     mov     z0.d, z1.d
 **     ret
 */
 TEST_UNIFORM_Z (revd_u8_x_untied, svuint8_t,

Reply via email to