@@ -9040,7 +9040,7 @@ (define_insn "@aarch64_sve_tbl<mode>"
(match_operand:<V_INT_EQUIV> 2 "register_operand" "w")]
UNSPEC_TBL))]
"TARGET_SVE"
- "tbl\t%0.<Vetype>, %1.<Vetype>, %2.<Vetype>"
+ "tbl\t%0.<Vetype>, {%1.<Vetype>}, %2.<Vetype>"
)
;; -------------------------------------------------------------------------
@@ -5,7 +5,7 @@
/*
** dup_lane_w0_bf16_tied1:
** mov (z[0-9]+\.h), w0
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_bf16_tied1, svbfloat16_t, uint16_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_w0_bf16_tied1, svbfloat16_t, uint16_t,
/*
** dup_lane_w0_bf16_untied:
** mov (z[0-9]+\.h), w0
-** tbl z0\.h, z1\.h, \1
+** tbl z0\.h, {z1\.h}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_bf16_untied, svbfloat16_t, uint16_t,
@@ -70,7 +70,7 @@ TEST_UNIFORM_Z (dup_lane_31_bf16, svbfloat16_t,
/*
** dup_lane_32_bf16:
** mov (z[0-9]+\.h), #32
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_32_bf16, svbfloat16_t,
@@ -80,7 +80,7 @@ TEST_UNIFORM_Z (dup_lane_32_bf16, svbfloat16_t,
/*
** dup_lane_63_bf16:
** mov (z[0-9]+\.h), #63
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_63_bf16, svbfloat16_t,
@@ -90,7 +90,7 @@ TEST_UNIFORM_Z (dup_lane_63_bf16, svbfloat16_t,
/*
** dup_lane_64_bf16:
** mov (z[0-9]+\.h), #64
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_bf16, svbfloat16_t,
@@ -100,7 +100,7 @@ TEST_UNIFORM_Z (dup_lane_64_bf16, svbfloat16_t,
/*
** dup_lane_255_bf16:
** mov (z[0-9]+\.h), #255
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_bf16, svbfloat16_t,
@@ -5,7 +5,7 @@
/*
** dup_lane_w0_f16_tied1:
** mov (z[0-9]+\.h), w0
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_f16_tied1, svfloat16_t, uint16_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_w0_f16_tied1, svfloat16_t, uint16_t,
/*
** dup_lane_w0_f16_untied:
** mov (z[0-9]+\.h), w0
-** tbl z0\.h, z1\.h, \1
+** tbl z0\.h, {z1\.h}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_f16_untied, svfloat16_t, uint16_t,
@@ -70,7 +70,7 @@ TEST_UNIFORM_Z (dup_lane_31_f16, svfloat16_t,
/*
** dup_lane_32_f16:
** mov (z[0-9]+\.h), #32
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_32_f16, svfloat16_t,
@@ -80,7 +80,7 @@ TEST_UNIFORM_Z (dup_lane_32_f16, svfloat16_t,
/*
** dup_lane_63_f16:
** mov (z[0-9]+\.h), #63
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_63_f16, svfloat16_t,
@@ -90,7 +90,7 @@ TEST_UNIFORM_Z (dup_lane_63_f16, svfloat16_t,
/*
** dup_lane_64_f16:
** mov (z[0-9]+\.h), #64
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_f16, svfloat16_t,
@@ -100,7 +100,7 @@ TEST_UNIFORM_Z (dup_lane_64_f16, svfloat16_t,
/*
** dup_lane_255_f16:
** mov (z[0-9]+\.h), #255
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_f16, svfloat16_t,
@@ -5,7 +5,7 @@
/*
** dup_lane_w0_f32_tied1:
** mov (z[0-9]+\.s), w0
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_f32_tied1, svfloat32_t, uint32_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_w0_f32_tied1, svfloat32_t, uint32_t,
/*
** dup_lane_w0_f32_untied:
** mov (z[0-9]+\.s), w0
-** tbl z0\.s, z1\.s, \1
+** tbl z0\.s, {z1\.s}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_f32_untied, svfloat32_t, uint32_t,
@@ -52,7 +52,7 @@ TEST_UNIFORM_Z (dup_lane_15_f32, svfloat32_t,
/*
** dup_lane_16_f32:
** mov (z[0-9]+\.s), #16
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_16_f32, svfloat32_t,
@@ -62,7 +62,7 @@ TEST_UNIFORM_Z (dup_lane_16_f32, svfloat32_t,
/*
** dup_lane_31_f32:
** mov (z[0-9]+\.s), #31
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_31_f32, svfloat32_t,
@@ -72,7 +72,7 @@ TEST_UNIFORM_Z (dup_lane_31_f32, svfloat32_t,
/*
** dup_lane_32_f32:
** mov (z[0-9]+\.s), #32
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_32_f32, svfloat32_t,
@@ -82,7 +82,7 @@ TEST_UNIFORM_Z (dup_lane_32_f32, svfloat32_t,
/*
** dup_lane_63_f32:
** mov (z[0-9]+\.s), #63
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_63_f32, svfloat32_t,
@@ -92,7 +92,7 @@ TEST_UNIFORM_Z (dup_lane_63_f32, svfloat32_t,
/*
** dup_lane_64_f32:
** mov (z[0-9]+\.s), #64
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_f32, svfloat32_t,
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (dup_lane_64_f32, svfloat32_t,
/*
** dup_lane_255_f32:
** mov (z[0-9]+\.s), #255
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_f32, svfloat32_t,
@@ -5,7 +5,7 @@
/*
** dup_lane_x0_f64_tied1:
** mov (z[0-9]+\.d), x0
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_x0_f64_tied1, svfloat64_t, uint64_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_x0_f64_tied1, svfloat64_t, uint64_t,
/*
** dup_lane_x0_f64_untied:
** mov (z[0-9]+\.d), x0
-** tbl z0\.d, z1\.d, \1
+** tbl z0\.d, {z1\.d}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_x0_f64_untied, svfloat64_t, uint64_t,
@@ -43,7 +43,7 @@ TEST_UNIFORM_Z (dup_lane_0_f64_untied, svfloat64_t,
/*
** dup_lane_15_f64:
** mov (z[0-9]+\.d), #15
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_15_f64, svfloat64_t,
@@ -53,7 +53,7 @@ TEST_UNIFORM_Z (dup_lane_15_f64, svfloat64_t,
/*
** dup_lane_16_f64:
** mov (z[0-9]+\.d), #16
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_16_f64, svfloat64_t,
@@ -63,7 +63,7 @@ TEST_UNIFORM_Z (dup_lane_16_f64, svfloat64_t,
/*
** dup_lane_31_f64:
** mov (z[0-9]+\.d), #31
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_31_f64, svfloat64_t,
@@ -73,7 +73,7 @@ TEST_UNIFORM_Z (dup_lane_31_f64, svfloat64_t,
/*
** dup_lane_32_f64:
** mov (z[0-9]+\.d), #32
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_32_f64, svfloat64_t,
@@ -83,7 +83,7 @@ TEST_UNIFORM_Z (dup_lane_32_f64, svfloat64_t,
/*
** dup_lane_63_f64:
** mov (z[0-9]+\.d), #63
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_63_f64, svfloat64_t,
@@ -93,7 +93,7 @@ TEST_UNIFORM_Z (dup_lane_63_f64, svfloat64_t,
/*
** dup_lane_64_f64:
** mov (z[0-9]+\.d), #64
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_f64, svfloat64_t,
@@ -103,7 +103,7 @@ TEST_UNIFORM_Z (dup_lane_64_f64, svfloat64_t,
/*
** dup_lane_255_f64:
** mov (z[0-9]+\.d), #255
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_f64, svfloat64_t,
@@ -5,7 +5,7 @@
/*
** dup_lane_w0_s16_tied1:
** mov (z[0-9]+\.h), w0
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_s16_tied1, svint16_t, uint16_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_w0_s16_tied1, svint16_t, uint16_t,
/*
** dup_lane_w0_s16_untied:
** mov (z[0-9]+\.h), w0
-** tbl z0\.h, z1\.h, \1
+** tbl z0\.h, {z1\.h}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_s16_untied, svint16_t, uint16_t,
@@ -88,7 +88,7 @@ TEST_UNIFORM_Z (dup_lane_31_s16, svint16_t,
/*
** dup_lane_32_s16:
** mov (z[0-9]+\.h), #32
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_32_s16, svint16_t,
@@ -98,7 +98,7 @@ TEST_UNIFORM_Z (dup_lane_32_s16, svint16_t,
/*
** dup_lane_63_s16:
** mov (z[0-9]+\.h), #63
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_63_s16, svint16_t,
@@ -108,7 +108,7 @@ TEST_UNIFORM_Z (dup_lane_63_s16, svint16_t,
/*
** dup_lane_64_s16:
** mov (z[0-9]+\.h), #64
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_s16, svint16_t,
@@ -118,7 +118,7 @@ TEST_UNIFORM_Z (dup_lane_64_s16, svint16_t,
/*
** dup_lane_255_s16:
** mov (z[0-9]+\.h), #255
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_s16, svint16_t,
@@ -5,7 +5,7 @@
/*
** dup_lane_w0_s32_tied1:
** mov (z[0-9]+\.s), w0
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_s32_tied1, svint32_t, uint32_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_w0_s32_tied1, svint32_t, uint32_t,
/*
** dup_lane_w0_s32_untied:
** mov (z[0-9]+\.s), w0
-** tbl z0\.s, z1\.s, \1
+** tbl z0\.s, {z1\.s}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_s32_untied, svint32_t, uint32_t,
@@ -70,7 +70,7 @@ TEST_UNIFORM_Z (dup_lane_15_s32, svint32_t,
/*
** dup_lane_16_s32:
** mov (z[0-9]+\.s), #16
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_16_s32, svint32_t,
@@ -80,7 +80,7 @@ TEST_UNIFORM_Z (dup_lane_16_s32, svint32_t,
/*
** dup_lane_31_s32:
** mov (z[0-9]+\.s), #31
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_31_s32, svint32_t,
@@ -90,7 +90,7 @@ TEST_UNIFORM_Z (dup_lane_31_s32, svint32_t,
/*
** dup_lane_32_s32:
** mov (z[0-9]+\.s), #32
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_32_s32, svint32_t,
@@ -100,7 +100,7 @@ TEST_UNIFORM_Z (dup_lane_32_s32, svint32_t,
/*
** dup_lane_63_s32:
** mov (z[0-9]+\.s), #63
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_63_s32, svint32_t,
@@ -110,7 +110,7 @@ TEST_UNIFORM_Z (dup_lane_63_s32, svint32_t,
/*
** dup_lane_64_s32:
** mov (z[0-9]+\.s), #64
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_s32, svint32_t,
@@ -120,7 +120,7 @@ TEST_UNIFORM_Z (dup_lane_64_s32, svint32_t,
/*
** dup_lane_255_s32:
** mov (z[0-9]+\.s), #255
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_s32, svint32_t,
@@ -5,7 +5,7 @@
/*
** dup_lane_x0_s64_tied1:
** mov (z[0-9]+\.d), x0
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_x0_s64_tied1, svint64_t, uint64_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_x0_s64_tied1, svint64_t, uint64_t,
/*
** dup_lane_x0_s64_untied:
** mov (z[0-9]+\.d), x0
-** tbl z0\.d, z1\.d, \1
+** tbl z0\.d, {z1\.d}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_x0_s64_untied, svint64_t, uint64_t,
@@ -52,7 +52,7 @@ TEST_UNIFORM_Z (dup_lane_7_s64, svint64_t,
/*
** dup_lane_8_s64:
** mov (z[0-9]+\.d), #8
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_8_s64, svint64_t,
@@ -62,7 +62,7 @@ TEST_UNIFORM_Z (dup_lane_8_s64, svint64_t,
/*
** dup_lane_15_s64:
** mov (z[0-9]+\.d), #15
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_15_s64, svint64_t,
@@ -72,7 +72,7 @@ TEST_UNIFORM_Z (dup_lane_15_s64, svint64_t,
/*
** dup_lane_16_s64:
** mov (z[0-9]+\.d), #16
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_16_s64, svint64_t,
@@ -82,7 +82,7 @@ TEST_UNIFORM_Z (dup_lane_16_s64, svint64_t,
/*
** dup_lane_31_s64:
** mov (z[0-9]+\.d), #31
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_31_s64, svint64_t,
@@ -92,7 +92,7 @@ TEST_UNIFORM_Z (dup_lane_31_s64, svint64_t,
/*
** dup_lane_32_s64:
** mov (z[0-9]+\.d), #32
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_32_s64, svint64_t,
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (dup_lane_32_s64, svint64_t,
/*
** dup_lane_63_s64:
** mov (z[0-9]+\.d), #63
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_63_s64, svint64_t,
@@ -112,7 +112,7 @@ TEST_UNIFORM_Z (dup_lane_63_s64, svint64_t,
/*
** dup_lane_64_s64:
** mov (z[0-9]+\.d), #64
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_s64, svint64_t,
@@ -122,7 +122,7 @@ TEST_UNIFORM_Z (dup_lane_64_s64, svint64_t,
/*
** dup_lane_255_s64:
** mov (z[0-9]+\.d), #255
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_s64, svint64_t,
@@ -5,7 +5,7 @@
/*
** dup_lane_w0_s8_tied1:
** mov (z[0-9]+\.b), w0
-** tbl z0\.b, z0\.b, \1
+** tbl z0\.b, {z0\.b}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_s8_tied1, svint8_t, uint8_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_w0_s8_tied1, svint8_t, uint8_t,
/*
** dup_lane_w0_s8_untied:
** mov (z[0-9]+\.b), w0
-** tbl z0\.b, z1\.b, \1
+** tbl z0\.b, {z1\.b}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_s8_untied, svint8_t, uint8_t,
@@ -106,7 +106,7 @@ TEST_UNIFORM_Z (dup_lane_63_s8, svint8_t,
/*
** dup_lane_64_s8:
** mov (z[0-9]+\.b), #64
-** tbl z0\.b, z0\.b, \1
+** tbl z0\.b, {z0\.b}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_s8, svint8_t,
@@ -116,7 +116,7 @@ TEST_UNIFORM_Z (dup_lane_64_s8, svint8_t,
/*
** dup_lane_255_s8:
** mov (z[0-9]+\.b), #-1
-** tbl z0\.b, z0\.b, \1
+** tbl z0\.b, {z0\.b}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_s8, svint8_t,
@@ -5,7 +5,7 @@
/*
** dup_lane_w0_u16_tied1:
** mov (z[0-9]+\.h), w0
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_u16_tied1, svuint16_t, uint16_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_w0_u16_tied1, svuint16_t, uint16_t,
/*
** dup_lane_w0_u16_untied:
** mov (z[0-9]+\.h), w0
-** tbl z0\.h, z1\.h, \1
+** tbl z0\.h, {z1\.h}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_u16_untied, svuint16_t, uint16_t,
@@ -88,7 +88,7 @@ TEST_UNIFORM_Z (dup_lane_31_u16, svuint16_t,
/*
** dup_lane_32_u16:
** mov (z[0-9]+\.h), #32
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_32_u16, svuint16_t,
@@ -98,7 +98,7 @@ TEST_UNIFORM_Z (dup_lane_32_u16, svuint16_t,
/*
** dup_lane_63_u16:
** mov (z[0-9]+\.h), #63
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_63_u16, svuint16_t,
@@ -108,7 +108,7 @@ TEST_UNIFORM_Z (dup_lane_63_u16, svuint16_t,
/*
** dup_lane_64_u16:
** mov (z[0-9]+\.h), #64
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_u16, svuint16_t,
@@ -118,7 +118,7 @@ TEST_UNIFORM_Z (dup_lane_64_u16, svuint16_t,
/*
** dup_lane_255_u16:
** mov (z[0-9]+\.h), #255
-** tbl z0\.h, z0\.h, \1
+** tbl z0\.h, {z0\.h}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_u16, svuint16_t,
@@ -5,7 +5,7 @@
/*
** dup_lane_w0_u32_tied1:
** mov (z[0-9]+\.s), w0
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_u32_tied1, svuint32_t, uint32_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_w0_u32_tied1, svuint32_t, uint32_t,
/*
** dup_lane_w0_u32_untied:
** mov (z[0-9]+\.s), w0
-** tbl z0\.s, z1\.s, \1
+** tbl z0\.s, {z1\.s}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_u32_untied, svuint32_t, uint32_t,
@@ -70,7 +70,7 @@ TEST_UNIFORM_Z (dup_lane_15_u32, svuint32_t,
/*
** dup_lane_16_u32:
** mov (z[0-9]+\.s), #16
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_16_u32, svuint32_t,
@@ -80,7 +80,7 @@ TEST_UNIFORM_Z (dup_lane_16_u32, svuint32_t,
/*
** dup_lane_31_u32:
** mov (z[0-9]+\.s), #31
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_31_u32, svuint32_t,
@@ -90,7 +90,7 @@ TEST_UNIFORM_Z (dup_lane_31_u32, svuint32_t,
/*
** dup_lane_32_u32:
** mov (z[0-9]+\.s), #32
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_32_u32, svuint32_t,
@@ -100,7 +100,7 @@ TEST_UNIFORM_Z (dup_lane_32_u32, svuint32_t,
/*
** dup_lane_63_u32:
** mov (z[0-9]+\.s), #63
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_63_u32, svuint32_t,
@@ -110,7 +110,7 @@ TEST_UNIFORM_Z (dup_lane_63_u32, svuint32_t,
/*
** dup_lane_64_u32:
** mov (z[0-9]+\.s), #64
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_u32, svuint32_t,
@@ -120,7 +120,7 @@ TEST_UNIFORM_Z (dup_lane_64_u32, svuint32_t,
/*
** dup_lane_255_u32:
** mov (z[0-9]+\.s), #255
-** tbl z0\.s, z0\.s, \1
+** tbl z0\.s, {z0\.s}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_u32, svuint32_t,
@@ -5,7 +5,7 @@
/*
** dup_lane_x0_u64_tied1:
** mov (z[0-9]+\.d), x0
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_x0_u64_tied1, svuint64_t, uint64_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_x0_u64_tied1, svuint64_t, uint64_t,
/*
** dup_lane_x0_u64_untied:
** mov (z[0-9]+\.d), x0
-** tbl z0\.d, z1\.d, \1
+** tbl z0\.d, {z1\.d}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_x0_u64_untied, svuint64_t, uint64_t,
@@ -52,7 +52,7 @@ TEST_UNIFORM_Z (dup_lane_7_u64, svuint64_t,
/*
** dup_lane_8_u64:
** mov (z[0-9]+\.d), #8
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_8_u64, svuint64_t,
@@ -62,7 +62,7 @@ TEST_UNIFORM_Z (dup_lane_8_u64, svuint64_t,
/*
** dup_lane_15_u64:
** mov (z[0-9]+\.d), #15
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_15_u64, svuint64_t,
@@ -72,7 +72,7 @@ TEST_UNIFORM_Z (dup_lane_15_u64, svuint64_t,
/*
** dup_lane_16_u64:
** mov (z[0-9]+\.d), #16
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_16_u64, svuint64_t,
@@ -82,7 +82,7 @@ TEST_UNIFORM_Z (dup_lane_16_u64, svuint64_t,
/*
** dup_lane_31_u64:
** mov (z[0-9]+\.d), #31
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_31_u64, svuint64_t,
@@ -92,7 +92,7 @@ TEST_UNIFORM_Z (dup_lane_31_u64, svuint64_t,
/*
** dup_lane_32_u64:
** mov (z[0-9]+\.d), #32
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_32_u64, svuint64_t,
@@ -102,7 +102,7 @@ TEST_UNIFORM_Z (dup_lane_32_u64, svuint64_t,
/*
** dup_lane_63_u64:
** mov (z[0-9]+\.d), #63
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_63_u64, svuint64_t,
@@ -112,7 +112,7 @@ TEST_UNIFORM_Z (dup_lane_63_u64, svuint64_t,
/*
** dup_lane_64_u64:
** mov (z[0-9]+\.d), #64
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_u64, svuint64_t,
@@ -122,7 +122,7 @@ TEST_UNIFORM_Z (dup_lane_64_u64, svuint64_t,
/*
** dup_lane_255_u64:
** mov (z[0-9]+\.d), #255
-** tbl z0\.d, z0\.d, \1
+** tbl z0\.d, {z0\.d}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_u64, svuint64_t,
@@ -5,7 +5,7 @@
/*
** dup_lane_w0_u8_tied1:
** mov (z[0-9]+\.b), w0
-** tbl z0\.b, z0\.b, \1
+** tbl z0\.b, {z0\.b}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_u8_tied1, svuint8_t, uint8_t,
@@ -15,7 +15,7 @@ TEST_UNIFORM_ZX (dup_lane_w0_u8_tied1, svuint8_t, uint8_t,
/*
** dup_lane_w0_u8_untied:
** mov (z[0-9]+\.b), w0
-** tbl z0\.b, z1\.b, \1
+** tbl z0\.b, {z1\.b}, \1
** ret
*/
TEST_UNIFORM_ZX (dup_lane_w0_u8_untied, svuint8_t, uint8_t,
@@ -106,7 +106,7 @@ TEST_UNIFORM_Z (dup_lane_63_u8, svuint8_t,
/*
** dup_lane_64_u8:
** mov (z[0-9]+\.b), #64
-** tbl z0\.b, z0\.b, \1
+** tbl z0\.b, {z0\.b}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_64_u8, svuint8_t,
@@ -116,7 +116,7 @@ TEST_UNIFORM_Z (dup_lane_64_u8, svuint8_t,
/*
** dup_lane_255_u8:
** mov (z[0-9]+\.b), #-1
-** tbl z0\.b, z0\.b, \1
+** tbl z0\.b, {z0\.b}, \1
** ret
*/
TEST_UNIFORM_Z (dup_lane_255_u8, svuint8_t,
@@ -4,7 +4,7 @@
/*
** tbl_bf16_tied1:
-** tbl z0\.h, z0\.h, z4\.h
+** tbl z0\.h, {z0\.h}, z4\.h
** ret
*/
TEST_DUAL_Z (tbl_bf16_tied1, svbfloat16_t, svuint16_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_bf16_tied1, svbfloat16_t, svuint16_t,
/*
** tbl_bf16_tied2:
-** tbl z0\.h, z4\.h, z0\.h
+** tbl z0\.h, {z4\.h}, z0\.h
** ret
*/
TEST_DUAL_Z_REV (tbl_bf16_tied2, svbfloat16_t, svuint16_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_bf16_tied2, svbfloat16_t, svuint16_t,
/*
** tbl_bf16_untied:
-** tbl z0\.h, z1\.h, z4\.h
+** tbl z0\.h, {z1\.h}, z4\.h
** ret
*/
TEST_DUAL_Z (tbl_bf16_untied, svbfloat16_t, svuint16_t,
@@ -4,7 +4,7 @@
/*
** tbl_f16_tied1:
-** tbl z0\.h, z0\.h, z4\.h
+** tbl z0\.h, {z0\.h}, z4\.h
** ret
*/
TEST_DUAL_Z (tbl_f16_tied1, svfloat16_t, svuint16_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_f16_tied1, svfloat16_t, svuint16_t,
/*
** tbl_f16_tied2:
-** tbl z0\.h, z4\.h, z0\.h
+** tbl z0\.h, {z4\.h}, z0\.h
** ret
*/
TEST_DUAL_Z_REV (tbl_f16_tied2, svfloat16_t, svuint16_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_f16_tied2, svfloat16_t, svuint16_t,
/*
** tbl_f16_untied:
-** tbl z0\.h, z1\.h, z4\.h
+** tbl z0\.h, {z1\.h}, z4\.h
** ret
*/
TEST_DUAL_Z (tbl_f16_untied, svfloat16_t, svuint16_t,
@@ -4,7 +4,7 @@
/*
** tbl_f32_tied1:
-** tbl z0\.s, z0\.s, z4\.s
+** tbl z0\.s, {z0\.s}, z4\.s
** ret
*/
TEST_DUAL_Z (tbl_f32_tied1, svfloat32_t, svuint32_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_f32_tied1, svfloat32_t, svuint32_t,
/*
** tbl_f32_tied2:
-** tbl z0\.s, z4\.s, z0\.s
+** tbl z0\.s, {z4\.s}, z0\.s
** ret
*/
TEST_DUAL_Z_REV (tbl_f32_tied2, svfloat32_t, svuint32_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_f32_tied2, svfloat32_t, svuint32_t,
/*
** tbl_f32_untied:
-** tbl z0\.s, z1\.s, z4\.s
+** tbl z0\.s, {z1\.s}, z4\.s
** ret
*/
TEST_DUAL_Z (tbl_f32_untied, svfloat32_t, svuint32_t,
@@ -4,7 +4,7 @@
/*
** tbl_f64_tied1:
-** tbl z0\.d, z0\.d, z4\.d
+** tbl z0\.d, {z0\.d}, z4\.d
** ret
*/
TEST_DUAL_Z (tbl_f64_tied1, svfloat64_t, svuint64_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_f64_tied1, svfloat64_t, svuint64_t,
/*
** tbl_f64_tied2:
-** tbl z0\.d, z4\.d, z0\.d
+** tbl z0\.d, {z4\.d}, z0\.d
** ret
*/
TEST_DUAL_Z_REV (tbl_f64_tied2, svfloat64_t, svuint64_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_f64_tied2, svfloat64_t, svuint64_t,
/*
** tbl_f64_untied:
-** tbl z0\.d, z1\.d, z4\.d
+** tbl z0\.d, {z1\.d}, z4\.d
** ret
*/
TEST_DUAL_Z (tbl_f64_untied, svfloat64_t, svuint64_t,
@@ -4,7 +4,7 @@
/*
** tbl_s16_tied1:
-** tbl z0\.h, z0\.h, z4\.h
+** tbl z0\.h, {z0\.h}, z4\.h
** ret
*/
TEST_DUAL_Z (tbl_s16_tied1, svint16_t, svuint16_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_s16_tied1, svint16_t, svuint16_t,
/*
** tbl_s16_tied2:
-** tbl z0\.h, z4\.h, z0\.h
+** tbl z0\.h, {z4\.h}, z0\.h
** ret
*/
TEST_DUAL_Z_REV (tbl_s16_tied2, svint16_t, svuint16_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_s16_tied2, svint16_t, svuint16_t,
/*
** tbl_s16_untied:
-** tbl z0\.h, z1\.h, z4\.h
+** tbl z0\.h, {z1\.h}, z4\.h
** ret
*/
TEST_DUAL_Z (tbl_s16_untied, svint16_t, svuint16_t,
@@ -4,7 +4,7 @@
/*
** tbl_s32_tied1:
-** tbl z0\.s, z0\.s, z4\.s
+** tbl z0\.s, {z0\.s}, z4\.s
** ret
*/
TEST_DUAL_Z (tbl_s32_tied1, svint32_t, svuint32_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_s32_tied1, svint32_t, svuint32_t,
/*
** tbl_s32_tied2:
-** tbl z0\.s, z4\.s, z0\.s
+** tbl z0\.s, {z4\.s}, z0\.s
** ret
*/
TEST_DUAL_Z_REV (tbl_s32_tied2, svint32_t, svuint32_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_s32_tied2, svint32_t, svuint32_t,
/*
** tbl_s32_untied:
-** tbl z0\.s, z1\.s, z4\.s
+** tbl z0\.s, {z1\.s}, z4\.s
** ret
*/
TEST_DUAL_Z (tbl_s32_untied, svint32_t, svuint32_t,
@@ -4,7 +4,7 @@
/*
** tbl_s64_tied1:
-** tbl z0\.d, z0\.d, z4\.d
+** tbl z0\.d, {z0\.d}, z4\.d
** ret
*/
TEST_DUAL_Z (tbl_s64_tied1, svint64_t, svuint64_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_s64_tied1, svint64_t, svuint64_t,
/*
** tbl_s64_tied2:
-** tbl z0\.d, z4\.d, z0\.d
+** tbl z0\.d, {z4\.d}, z0\.d
** ret
*/
TEST_DUAL_Z_REV (tbl_s64_tied2, svint64_t, svuint64_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_s64_tied2, svint64_t, svuint64_t,
/*
** tbl_s64_untied:
-** tbl z0\.d, z1\.d, z4\.d
+** tbl z0\.d, {z1\.d}, z4\.d
** ret
*/
TEST_DUAL_Z (tbl_s64_untied, svint64_t, svuint64_t,
@@ -4,7 +4,7 @@
/*
** tbl_s8_tied1:
-** tbl z0\.b, z0\.b, z4\.b
+** tbl z0\.b, {z0\.b}, z4\.b
** ret
*/
TEST_DUAL_Z (tbl_s8_tied1, svint8_t, svuint8_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_s8_tied1, svint8_t, svuint8_t,
/*
** tbl_s8_tied2:
-** tbl z0\.b, z4\.b, z0\.b
+** tbl z0\.b, {z4\.b}, z0\.b
** ret
*/
TEST_DUAL_Z_REV (tbl_s8_tied2, svint8_t, svuint8_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_s8_tied2, svint8_t, svuint8_t,
/*
** tbl_s8_untied:
-** tbl z0\.b, z1\.b, z4\.b
+** tbl z0\.b, {z1\.b}, z4\.b
** ret
*/
TEST_DUAL_Z (tbl_s8_untied, svint8_t, svuint8_t,
@@ -4,7 +4,7 @@
/*
** tbl_u16_tied1:
-** tbl z0\.h, z0\.h, z4\.h
+** tbl z0\.h, {z0\.h}, z4\.h
** ret
*/
TEST_DUAL_Z (tbl_u16_tied1, svuint16_t, svuint16_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_u16_tied1, svuint16_t, svuint16_t,
/*
** tbl_u16_tied2:
-** tbl z0\.h, z4\.h, z0\.h
+** tbl z0\.h, {z4\.h}, z0\.h
** ret
*/
TEST_DUAL_Z_REV (tbl_u16_tied2, svuint16_t, svuint16_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_u16_tied2, svuint16_t, svuint16_t,
/*
** tbl_u16_untied:
-** tbl z0\.h, z1\.h, z4\.h
+** tbl z0\.h, {z1\.h}, z4\.h
** ret
*/
TEST_DUAL_Z (tbl_u16_untied, svuint16_t, svuint16_t,
@@ -4,7 +4,7 @@
/*
** tbl_u32_tied1:
-** tbl z0\.s, z0\.s, z4\.s
+** tbl z0\.s, {z0\.s}, z4\.s
** ret
*/
TEST_DUAL_Z (tbl_u32_tied1, svuint32_t, svuint32_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_u32_tied1, svuint32_t, svuint32_t,
/*
** tbl_u32_tied2:
-** tbl z0\.s, z4\.s, z0\.s
+** tbl z0\.s, {z4\.s}, z0\.s
** ret
*/
TEST_DUAL_Z_REV (tbl_u32_tied2, svuint32_t, svuint32_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_u32_tied2, svuint32_t, svuint32_t,
/*
** tbl_u32_untied:
-** tbl z0\.s, z1\.s, z4\.s
+** tbl z0\.s, {z1\.s}, z4\.s
** ret
*/
TEST_DUAL_Z (tbl_u32_untied, svuint32_t, svuint32_t,
@@ -4,7 +4,7 @@
/*
** tbl_u64_tied1:
-** tbl z0\.d, z0\.d, z4\.d
+** tbl z0\.d, {z0\.d}, z4\.d
** ret
*/
TEST_DUAL_Z (tbl_u64_tied1, svuint64_t, svuint64_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_u64_tied1, svuint64_t, svuint64_t,
/*
** tbl_u64_tied2:
-** tbl z0\.d, z4\.d, z0\.d
+** tbl z0\.d, {z4\.d}, z0\.d
** ret
*/
TEST_DUAL_Z_REV (tbl_u64_tied2, svuint64_t, svuint64_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_u64_tied2, svuint64_t, svuint64_t,
/*
** tbl_u64_untied:
-** tbl z0\.d, z1\.d, z4\.d
+** tbl z0\.d, {z1\.d}, z4\.d
** ret
*/
TEST_DUAL_Z (tbl_u64_untied, svuint64_t, svuint64_t,
@@ -4,7 +4,7 @@
/*
** tbl_u8_tied1:
-** tbl z0\.b, z0\.b, z4\.b
+** tbl z0\.b, {z0\.b}, z4\.b
** ret
*/
TEST_DUAL_Z (tbl_u8_tied1, svuint8_t, svuint8_t,
@@ -13,7 +13,7 @@ TEST_DUAL_Z (tbl_u8_tied1, svuint8_t, svuint8_t,
/*
** tbl_u8_tied2:
-** tbl z0\.b, z4\.b, z0\.b
+** tbl z0\.b, {z4\.b}, z0\.b
** ret
*/
TEST_DUAL_Z_REV (tbl_u8_tied2, svuint8_t, svuint8_t,
@@ -22,7 +22,7 @@ TEST_DUAL_Z_REV (tbl_u8_tied2, svuint8_t, svuint8_t,
/*
** tbl_u8_untied:
-** tbl z0\.b, z1\.b, z4\.b
+** tbl z0\.b, {z1\.b}, z4\.b
** ret
*/
TEST_DUAL_Z (tbl_u8_untied, svuint8_t, svuint8_t,
@@ -19,4 +19,4 @@ f (uint8_t *restrict a, uint8_t *restrict b)
}
}
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} 1 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, {z[0-9]+\.b}, z[0-9]+\.b\n} 1 } } */
@@ -19,4 +19,4 @@ f (uint8_t *restrict a, uint8_t *restrict b)
}
}
-/* { dg-final { scan-assembler {\ttbl\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} } } */
+/* { dg-final { scan-assembler {\ttbl\tz[0-9]+\.b, {z[0-9]+\.b}, z[0-9]+\.b\n} } } */
@@ -31,10 +31,7 @@ UZP1 (vnx4sf, ((vnx4si) { 0, 2, 4, 6, 8, 10, 12, 14 }));
UZP1 (vnx8hf, ((vnx8hi) { 0, 2, 4, 6, 8, 10, 12, 14,
16, 18, 20, 22, 24, 26, 28, 30 }));
-/* { dg-final { scan-assembler-not {\ttbl\tz[0-9]+\.d, z[0-9]+\.d, z[0-9]+\.d\n} } } */
-/* { dg-final { scan-assembler-not {\ttbl\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} } } */
-/* { dg-final { scan-assembler-not {\ttbl\tz[0-9]+\.h, z[0-9]+\.h, z[0-9]+\.h\n} } } */
-/* { dg-final { scan-assembler-not {\ttbl\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} } } */
+/* { dg-final { scan-assembler-not {\ttbl\t} } } */
/* { dg-final { scan-assembler-times {\tuzp1\tz[0-9]+\.d, z[0-9]+\.d, z[0-9]+\.d\n} 2 } } */
/* { dg-final { scan-assembler-times {\tuzp1\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} 2 } } */
@@ -30,10 +30,7 @@ UZP2 (vnx4sf, ((vnx4si) { 1, 3, 5, 7, 9, 11, 13, 15 }));
UZP2 (vnx8hf, ((vnx8hi) { 1, 3, 5, 7, 9, 11, 13, 15,
17, 19, 21, 23, 25, 27, 29, 31 }));
-/* { dg-final { scan-assembler-not {\ttbl\tz[0-9]+\.d, z[0-9]+\.d, z[0-9]+\.d\n} } } */
-/* { dg-final { scan-assembler-not {\ttbl\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} } } */
-/* { dg-final { scan-assembler-not {\ttbl\tz[0-9]+\.h, z[0-9]+\.h, z[0-9]+\.h\n} } } */
-/* { dg-final { scan-assembler-not {\ttbl\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} } } */
+/* { dg-final { scan-assembler-not {\ttbl\t} } } */
/* { dg-final { scan-assembler-times {\tuzp2\tz[0-9]+\.d, z[0-9]+\.d, z[0-9]+\.d\n} 2 } } */
/* { dg-final { scan-assembler-times {\tuzp2\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} 2 } } */
@@ -26,7 +26,7 @@ VEC_PERM (vnx2df, vnx2di);
VEC_PERM (vnx4sf, vnx4si);
VEC_PERM (vnx8hf, vnx8hi);
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.d, z[0-9]+\.d, z[0-9]+\.d\n} 4 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} 4 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.h, z[0-9]+\.h, z[0-9]+\.h\n} 4 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} 2 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.d, {z[0-9]+\.d}, z[0-9]+\.d\n} 4 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.s, {z[0-9]+\.s}, z[0-9]+\.s\n} 4 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.h, {z[0-9]+\.h}, z[0-9]+\.h\n} 4 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, {z[0-9]+\.b}, z[0-9]+\.b\n} 2 } } */
@@ -31,7 +31,7 @@ VEC_PERM_CONST (vnx4sf, ((vnx4si) { 1, 9, 13, 11, 2, 5, 4, 2 }));
VEC_PERM_CONST (vnx8hf, ((vnx8hi) { 8, 27, 5, 4, 21, 12, 13, 0,
22, 1, 8, 9, 3, 24, 15, 1 }));
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.d, z[0-9]+\.d, z[0-9]+\.d\n} 4 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} 4 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.h, z[0-9]+\.h, z[0-9]+\.h\n} 4 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} 2 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.d, {z[0-9]+\.d}, z[0-9]+\.d\n} 4 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.s, {z[0-9]+\.s}, z[0-9]+\.s\n} 4 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.h, {z[0-9]+\.h}, z[0-9]+\.h\n} 4 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, {z[0-9]+\.b}, z[0-9]+\.b\n} 2 } } */
@@ -62,7 +62,7 @@ VEC_PERM_CONST_OVERRUN (vnx8hf, ((vnx8hi) { 8 + (32 * 3), 27 + (32 * 1),
3 + (32 * 2), 24 + (32 * 2),
15 + (32 * 1), 1 + (32 * 1) }));
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.d, z[0-9]+\.d, z[0-9]+\.d\n} 4 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} 4 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.h, z[0-9]+\.h, z[0-9]+\.h\n} 4 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} 2 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.d, {z[0-9]+\.d}, z[0-9]+\.d\n} 4 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.s, {z[0-9]+\.s}, z[0-9]+\.s\n} 4 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.h, {z[0-9]+\.h}, z[0-9]+\.h\n} 4 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, {z[0-9]+\.b}, z[0-9]+\.b\n} 2 } } */
@@ -30,7 +30,7 @@ VEC_PERM_SINGLE (vnx4sf, ((vnx4si) { 4, 5, 6, 0, 2, 7, 4, 2 }));
VEC_PERM_SINGLE (vnx8hf, ((vnx8hi) { 8, 7, 5, 4, 11, 12, 13, 0,
1, 1, 8, 9, 3, 14, 15, 1 }));
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.d, z[0-9]+\.d, z[0-9]+\.d\n} 2 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} 2 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.h, z[0-9]+\.h, z[0-9]+\.h\n} 2 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} 1 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.d, {z[0-9]+\.d}, z[0-9]+\.d\n} 2 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.s, {z[0-9]+\.s}, z[0-9]+\.s\n} 2 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.h, {z[0-9]+\.h}, z[0-9]+\.h\n} 2 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, {z[0-9]+\.b}, z[0-9]+\.b\n} 1 } } */
@@ -25,7 +25,7 @@ VEC_PERM (vnx2df, vnx2di)
VEC_PERM (vnx4sf, vnx4si)
VEC_PERM (vnx8hf, vnx8hi)
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.d, z[0-9]+\.d, z[0-9]+\.d\n} 2 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} 2 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.h, z[0-9]+\.h, z[0-9]+\.h\n} 2 } } */
-/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} 1 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.d, {z[0-9]+\.d}, z[0-9]+\.d\n} 2 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.s, {z[0-9]+\.s}, z[0-9]+\.s\n} 2 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.h, {z[0-9]+\.h}, z[0-9]+\.h\n} 2 } } */
+/* { dg-final { scan-assembler-times {\ttbl\tz[0-9]+\.b, {z[0-9]+\.b}, z[0-9]+\.b\n} 1 } } */