[3/6] aarch64: Relax ordering requirements in SVE dup tests

Message ID: 20230509064831.1651327-4-richard.sandiford@arm.com
State: New
Series: aarch64: Avoid hard-coding specific register allocations

Commit Message

Richard Sandiford May 9, 2023, 6:48 a.m. UTC
Some of the svdup tests expand to a SEL between two constant vectors.
This patch allows the constants to be formed in either order.
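
For context, each affected assertion checks the whole body of a function that
performs a zeroing-predicated svdup of an immediate that needs two constant
moves feeding a SEL.  A minimal sketch of that kind of source, written here as
plain C rather than the testsuite's TEST_UNIFORM_Z macro wrapper, is:

#include <arm_sve.h>

/* Duplicate 128 into the active lanes of p0 and zero the inactive
   lanes.  128 is not representable as a single predicated move
   immediate, so the compiler materialises #0 and #128 in two
   registers and selects between them with SEL.  */
svint16_t
dup_128_s16_z (svbool_t p0)
{
  return svdup_n_s16_z (p0, 128);
}

Because the two constant moves are independent, the register allocator is free
to emit them in either order.  The check-function-bodies patterns therefore
wrap both orders in a ( ... | ... ) alternation; capture groups are numbered
across the whole regular expression, which is why the second alternative
refers to its operands as \3 and \4 rather than \1 and \2.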

gcc/testsuite/
	* gcc.target/aarch64/sve/acle/asm/dup_s16.c: When using SEL to select
	between two constant vectors, allow the constant moves to appear in
	either order.
	* gcc.target/aarch64/sve/acle/asm/dup_s32.c: Likewise.
	* gcc.target/aarch64/sve/acle/asm/dup_s64.c: Likewise.
	* gcc.target/aarch64/sve/acle/asm/dup_u16.c: Likewise.
	* gcc.target/aarch64/sve/acle/asm/dup_u32.c: Likewise.
	* gcc.target/aarch64/sve/acle/asm/dup_u64.c: Likewise.
---
 .../gcc.target/aarch64/sve/acle/asm/dup_s16.c | 72 +++++++++++++++++++
 .../gcc.target/aarch64/sve/acle/asm/dup_s32.c | 60 ++++++++++++++++
 .../gcc.target/aarch64/sve/acle/asm/dup_s64.c | 60 ++++++++++++++++
 .../gcc.target/aarch64/sve/acle/asm/dup_u16.c | 72 +++++++++++++++++++
 .../gcc.target/aarch64/sve/acle/asm/dup_u32.c | 60 ++++++++++++++++
 .../gcc.target/aarch64/sve/acle/asm/dup_u64.c | 60 ++++++++++++++++
 6 files changed, 384 insertions(+)

Patch

diff --git a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s16.c b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s16.c
index 21ab6f63e37..9c91a5bbad9 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s16.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s16.c
@@ -611,9 +611,15 @@  TEST_UNIFORM_Z (dup_127_s16_z, svint16_t,
 
 /*
 ** dup_128_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #128
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #128
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_128_s16_z, svint16_t,
@@ -632,9 +638,15 @@  TEST_UNIFORM_Z (dup_253_s16_z, svint16_t,
 
 /*
 ** dup_254_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #254
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #254
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_254_s16_z, svint16_t,
@@ -643,9 +655,15 @@  TEST_UNIFORM_Z (dup_254_s16_z, svint16_t,
 
 /*
 ** dup_255_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #255
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_255_s16_z, svint16_t,
@@ -663,9 +681,15 @@  TEST_UNIFORM_Z (dup_256_s16_z, svint16_t,
 
 /*
 ** dup_257_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+)\.b, #1
 **	sel	z0\.h, p0, \2\.h, \1\.h
+** |
+**	mov	(z[0-9]+)\.b, #1
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3\.h, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_257_s16_z, svint16_t,
@@ -702,9 +726,15 @@  TEST_UNIFORM_Z (dup_7ffd_s16_z, svint16_t,
 
 /*
 ** dup_7ffe_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #32766
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #32766
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7ffe_s16_z, svint16_t,
@@ -713,9 +743,15 @@  TEST_UNIFORM_Z (dup_7ffe_s16_z, svint16_t,
 
 /*
 ** dup_7fff_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #32767
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7fff_s16_z, svint16_t,
@@ -742,9 +778,15 @@  TEST_UNIFORM_Z (dup_m128_s16_z, svint16_t,
 
 /*
 ** dup_m129_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #-129
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #-129
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m129_s16_z, svint16_t,
@@ -763,9 +805,15 @@  TEST_UNIFORM_Z (dup_m254_s16_z, svint16_t,
 
 /*
 ** dup_m255_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #-255
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #-255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m255_s16_z, svint16_t,
@@ -783,9 +831,15 @@  TEST_UNIFORM_Z (dup_m256_s16_z, svint16_t,
 
 /*
 ** dup_m257_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #-257
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #-257
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m257_s16_z, svint16_t,
@@ -794,9 +848,15 @@  TEST_UNIFORM_Z (dup_m257_s16_z, svint16_t,
 
 /*
 ** dup_m258_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+)\.b, #-2
 **	sel	z0\.h, p0, \2\.h, \1\.h
+** |
+**	mov	(z[0-9]+)\.b, #-2
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3\.h, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m258_s16_z, svint16_t,
@@ -828,9 +888,15 @@  TEST_UNIFORM_Z (dup_m7f00_s16_z, svint16_t,
 
 /*
 ** dup_m7f01_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #-32513
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #-32513
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7f01_s16_z, svint16_t,
@@ -849,9 +915,15 @@  TEST_UNIFORM_Z (dup_m7ffe_s16_z, svint16_t,
 
 /*
 ** dup_m7fff_s16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #-32767
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #-32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7fff_s16_z, svint16_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s32.c b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s32.c
index 500ec48b34a..1cfecd962a4 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s32.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s32.c
@@ -603,9 +603,15 @@  TEST_UNIFORM_Z (dup_127_s32_z, svint32_t,
 
 /*
 ** dup_128_s32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #128
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #128
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_128_s32_z, svint32_t,
@@ -624,9 +630,15 @@  TEST_UNIFORM_Z (dup_253_s32_z, svint32_t,
 
 /*
 ** dup_254_s32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #254
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #254
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_254_s32_z, svint32_t,
@@ -635,9 +647,15 @@  TEST_UNIFORM_Z (dup_254_s32_z, svint32_t,
 
 /*
 ** dup_255_s32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #255
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_255_s32_z, svint32_t,
@@ -688,9 +706,15 @@  TEST_UNIFORM_Z (dup_7ffd_s32_z, svint32_t,
 
 /*
 ** dup_7ffe_s32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #32766
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #32766
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7ffe_s32_z, svint32_t,
@@ -699,9 +723,15 @@  TEST_UNIFORM_Z (dup_7ffe_s32_z, svint32_t,
 
 /*
 ** dup_7fff_s32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #32767
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7fff_s32_z, svint32_t,
@@ -728,9 +758,15 @@  TEST_UNIFORM_Z (dup_m128_s32_z, svint32_t,
 
 /*
 ** dup_m129_s32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #-129
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #-129
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m129_s32_z, svint32_t,
@@ -749,9 +785,15 @@  TEST_UNIFORM_Z (dup_m254_s32_z, svint32_t,
 
 /*
 ** dup_m255_s32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #-255
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #-255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m255_s32_z, svint32_t,
@@ -769,9 +811,15 @@  TEST_UNIFORM_Z (dup_m256_s32_z, svint32_t,
 
 /*
 ** dup_m257_s32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #-257
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #-257
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m257_s32_z, svint32_t,
@@ -808,9 +856,15 @@  TEST_UNIFORM_Z (dup_m7f00_s32_z, svint32_t,
 
 /*
 ** dup_m7f01_s32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #-32513
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #-32513
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7f01_s32_z, svint32_t,
@@ -829,9 +883,15 @@  TEST_UNIFORM_Z (dup_m7ffe_s32_z, svint32_t,
 
 /*
 ** dup_m7fff_s32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #-32767
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #-32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7fff_s32_z, svint32_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s64.c b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s64.c
index 651bb1b43f0..5189dcf590a 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s64.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s64.c
@@ -603,9 +603,15 @@  TEST_UNIFORM_Z (dup_127_s64_z, svint64_t,
 
 /*
 ** dup_128_s64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #128
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #128
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_128_s64_z, svint64_t,
@@ -624,9 +630,15 @@  TEST_UNIFORM_Z (dup_253_s64_z, svint64_t,
 
 /*
 ** dup_254_s64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #254
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #254
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_254_s64_z, svint64_t,
@@ -635,9 +647,15 @@  TEST_UNIFORM_Z (dup_254_s64_z, svint64_t,
 
 /*
 ** dup_255_s64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #255
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_255_s64_z, svint64_t,
@@ -688,9 +706,15 @@  TEST_UNIFORM_Z (dup_7ffd_s64_z, svint64_t,
 
 /*
 ** dup_7ffe_s64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #32766
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #32766
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7ffe_s64_z, svint64_t,
@@ -699,9 +723,15 @@  TEST_UNIFORM_Z (dup_7ffe_s64_z, svint64_t,
 
 /*
 ** dup_7fff_s64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #32767
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7fff_s64_z, svint64_t,
@@ -728,9 +758,15 @@  TEST_UNIFORM_Z (dup_m128_s64_z, svint64_t,
 
 /*
 ** dup_m129_s64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #-129
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #-129
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m129_s64_z, svint64_t,
@@ -749,9 +785,15 @@  TEST_UNIFORM_Z (dup_m254_s64_z, svint64_t,
 
 /*
 ** dup_m255_s64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #-255
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #-255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m255_s64_z, svint64_t,
@@ -769,9 +811,15 @@  TEST_UNIFORM_Z (dup_m256_s64_z, svint64_t,
 
 /*
 ** dup_m257_s64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #-257
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #-257
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m257_s64_z, svint64_t,
@@ -808,9 +856,15 @@  TEST_UNIFORM_Z (dup_m7f00_s64_z, svint64_t,
 
 /*
 ** dup_m7f01_s64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #-32513
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #-32513
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7f01_s64_z, svint64_t,
@@ -829,9 +883,15 @@  TEST_UNIFORM_Z (dup_m7ffe_s64_z, svint64_t,
 
 /*
 ** dup_m7fff_s64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #-32767
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #-32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7fff_s64_z, svint64_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u16.c b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u16.c
index dba409d5b3b..09fecd44b88 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u16.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u16.c
@@ -611,9 +611,15 @@  TEST_UNIFORM_Z (dup_127_u16_z, svuint16_t,
 
 /*
 ** dup_128_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #128
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #128
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_128_u16_z, svuint16_t,
@@ -632,9 +638,15 @@  TEST_UNIFORM_Z (dup_253_u16_z, svuint16_t,
 
 /*
 ** dup_254_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #254
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #254
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_254_u16_z, svuint16_t,
@@ -643,9 +655,15 @@  TEST_UNIFORM_Z (dup_254_u16_z, svuint16_t,
 
 /*
 ** dup_255_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #255
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_255_u16_z, svuint16_t,
@@ -663,9 +681,15 @@  TEST_UNIFORM_Z (dup_256_u16_z, svuint16_t,
 
 /*
 ** dup_257_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+)\.b, #1
 **	sel	z0\.h, p0, \2\.h, \1\.h
+** |
+**	mov	(z[0-9]+)\.b, #1
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3\.h, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_257_u16_z, svuint16_t,
@@ -702,9 +726,15 @@  TEST_UNIFORM_Z (dup_7ffd_u16_z, svuint16_t,
 
 /*
 ** dup_7ffe_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #32766
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #32766
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7ffe_u16_z, svuint16_t,
@@ -713,9 +743,15 @@  TEST_UNIFORM_Z (dup_7ffe_u16_z, svuint16_t,
 
 /*
 ** dup_7fff_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #32767
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7fff_u16_z, svuint16_t,
@@ -742,9 +778,15 @@  TEST_UNIFORM_Z (dup_m128_u16_z, svuint16_t,
 
 /*
 ** dup_m129_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #-129
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #-129
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m129_u16_z, svuint16_t,
@@ -763,9 +805,15 @@  TEST_UNIFORM_Z (dup_m254_u16_z, svuint16_t,
 
 /*
 ** dup_m255_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #-255
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #-255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m255_u16_z, svuint16_t,
@@ -783,9 +831,15 @@  TEST_UNIFORM_Z (dup_m256_u16_z, svuint16_t,
 
 /*
 ** dup_m257_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #-257
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #-257
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m257_u16_z, svuint16_t,
@@ -794,9 +848,15 @@  TEST_UNIFORM_Z (dup_m257_u16_z, svuint16_t,
 
 /*
 ** dup_m258_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+)\.b, #-2
 **	sel	z0\.h, p0, \2\.h, \1\.h
+** |
+**	mov	(z[0-9]+)\.b, #-2
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3\.h, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m258_u16_z, svuint16_t,
@@ -828,9 +888,15 @@  TEST_UNIFORM_Z (dup_m7f00_u16_z, svuint16_t,
 
 /*
 ** dup_m7f01_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #-32513
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #-32513
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7f01_u16_z, svuint16_t,
@@ -849,9 +915,15 @@  TEST_UNIFORM_Z (dup_m7ffe_u16_z, svuint16_t,
 
 /*
 ** dup_m7fff_u16_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.h), #-32767
 **	sel	z0\.h, p0, \2, \1\.h
+** |
+**	mov	(z[0-9]+\.h), #-32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.h, p0, \3, \4\.h
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7fff_u16_z, svuint16_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u32.c b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u32.c
index 7d5b4626fd4..4b7da13a456 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u32.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u32.c
@@ -603,9 +603,15 @@  TEST_UNIFORM_Z (dup_127_u32_z, svuint32_t,
 
 /*
 ** dup_128_u32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #128
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #128
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_128_u32_z, svuint32_t,
@@ -624,9 +630,15 @@  TEST_UNIFORM_Z (dup_253_u32_z, svuint32_t,
 
 /*
 ** dup_254_u32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #254
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #254
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_254_u32_z, svuint32_t,
@@ -635,9 +647,15 @@  TEST_UNIFORM_Z (dup_254_u32_z, svuint32_t,
 
 /*
 ** dup_255_u32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #255
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_255_u32_z, svuint32_t,
@@ -688,9 +706,15 @@  TEST_UNIFORM_Z (dup_7ffd_u32_z, svuint32_t,
 
 /*
 ** dup_7ffe_u32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #32766
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #32766
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7ffe_u32_z, svuint32_t,
@@ -699,9 +723,15 @@  TEST_UNIFORM_Z (dup_7ffe_u32_z, svuint32_t,
 
 /*
 ** dup_7fff_u32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #32767
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7fff_u32_z, svuint32_t,
@@ -728,9 +758,15 @@  TEST_UNIFORM_Z (dup_m128_u32_z, svuint32_t,
 
 /*
 ** dup_m129_u32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #-129
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #-129
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m129_u32_z, svuint32_t,
@@ -749,9 +785,15 @@  TEST_UNIFORM_Z (dup_m254_u32_z, svuint32_t,
 
 /*
 ** dup_m255_u32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #-255
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #-255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m255_u32_z, svuint32_t,
@@ -769,9 +811,15 @@  TEST_UNIFORM_Z (dup_m256_u32_z, svuint32_t,
 
 /*
 ** dup_m257_u32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #-257
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #-257
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m257_u32_z, svuint32_t,
@@ -808,9 +856,15 @@  TEST_UNIFORM_Z (dup_m7f00_u32_z, svuint32_t,
 
 /*
 ** dup_m7f01_u32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #-32513
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #-32513
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7f01_u32_z, svuint32_t,
@@ -829,9 +883,15 @@  TEST_UNIFORM_Z (dup_m7ffe_u32_z, svuint32_t,
 
 /*
 ** dup_m7fff_u32_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.s), #-32767
 **	sel	z0\.s, p0, \2, \1\.s
+** |
+**	mov	(z[0-9]+\.s), #-32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.s, p0, \3, \4\.s
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7fff_u32_z, svuint32_t,
diff --git a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u64.c b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u64.c
index 0431e75bc65..4d64b40a90b 100644
--- a/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u64.c
+++ b/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_u64.c
@@ -603,9 +603,15 @@  TEST_UNIFORM_Z (dup_127_u64_z, svuint64_t,
 
 /*
 ** dup_128_u64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #128
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #128
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_128_u64_z, svuint64_t,
@@ -624,9 +630,15 @@  TEST_UNIFORM_Z (dup_253_u64_z, svuint64_t,
 
 /*
 ** dup_254_u64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #254
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #254
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_254_u64_z, svuint64_t,
@@ -635,9 +647,15 @@  TEST_UNIFORM_Z (dup_254_u64_z, svuint64_t,
 
 /*
 ** dup_255_u64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #255
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_255_u64_z, svuint64_t,
@@ -688,9 +706,15 @@  TEST_UNIFORM_Z (dup_7ffd_u64_z, svuint64_t,
 
 /*
 ** dup_7ffe_u64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #32766
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #32766
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7ffe_u64_z, svuint64_t,
@@ -699,9 +723,15 @@  TEST_UNIFORM_Z (dup_7ffe_u64_z, svuint64_t,
 
 /*
 ** dup_7fff_u64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #32767
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_7fff_u64_z, svuint64_t,
@@ -728,9 +758,15 @@  TEST_UNIFORM_Z (dup_m128_u64_z, svuint64_t,
 
 /*
 ** dup_m129_u64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #-129
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #-129
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m129_u64_z, svuint64_t,
@@ -749,9 +785,15 @@  TEST_UNIFORM_Z (dup_m254_u64_z, svuint64_t,
 
 /*
 ** dup_m255_u64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #-255
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #-255
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m255_u64_z, svuint64_t,
@@ -769,9 +811,15 @@  TEST_UNIFORM_Z (dup_m256_u64_z, svuint64_t,
 
 /*
 ** dup_m257_u64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #-257
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #-257
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m257_u64_z, svuint64_t,
@@ -808,9 +856,15 @@  TEST_UNIFORM_Z (dup_m7f00_u64_z, svuint64_t,
 
 /*
 ** dup_m7f01_u64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #-32513
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #-32513
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7f01_u64_z, svuint64_t,
@@ -829,9 +883,15 @@  TEST_UNIFORM_Z (dup_m7ffe_u64_z, svuint64_t,
 
 /*
 ** dup_m7fff_u64_z:
+** (
 **	mov	(z[0-9]+)\.b, #0
 **	mov	(z[0-9]+\.d), #-32767
 **	sel	z0\.d, p0, \2, \1\.d
+** |
+**	mov	(z[0-9]+\.d), #-32767
+**	mov	(z[0-9]+)\.b, #0
+**	sel	z0\.d, p0, \3, \4\.d
+** )
 **	ret
 */
 TEST_UNIFORM_Z (dup_m7fff_u64_z, svuint64_t,