gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_lane_u64.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
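
/* Each "**" comment below is an assembly template that
   check-function-bodies matches against the compiled body of the
   function it precedes; (z[0-9]+\.d) captures whichever temporary
   register the compiler chooses and \1 requires that same register to
   be reused.  "_tied1" tests reuse the first vector operand as the
   destination (z0 = f (z0, ...)); "_untied" tests write to a different
   register (z0 = f (z1, ...)).  */

/* A lane index known only at run time (here passed in x0) cannot use
   DUP (indexed), so the expected code broadcasts the index with MOV
   and selects the lane with TBL.  */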
/*
** dup_lane_x0_u64_tied1:
**	mov	(z[0-9]+\.d), x0
**	tbl	z0\.d, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (dup_lane_x0_u64_tied1, svuint64_t, uint64_t,
		 z0 = svdup_lane_u64 (z0, x0),
		 z0 = svdup_lane (z0, x0))

/*
** dup_lane_x0_u64_untied:
**	mov	(z[0-9]+\.d), x0
**	tbl	z0\.d, z1\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (dup_lane_x0_u64_untied, svuint64_t, uint64_t,
		 z0 = svdup_lane_u64 (z1, x0),
		 z0 = svdup_lane (z1, x0))
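
/* Constant lane indices 0-7 fit the immediate field of DUP (indexed)
   for .d elements, so a single DUP is expected.  */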
/*
** dup_lane_0_u64_tied1:
**	dup	z0\.d, z0\.d\[0\]
**	ret
*/
TEST_UNIFORM_Z (dup_lane_0_u64_tied1, svuint64_t,
		z0 = svdup_lane_u64 (z0, 0),
		z0 = svdup_lane (z0, 0))

/*
** dup_lane_0_u64_untied:
**	dup	z0\.d, z1\.d\[0\]
**	ret
*/
TEST_UNIFORM_Z (dup_lane_0_u64_untied, svuint64_t,
		z0 = svdup_lane_u64 (z1, 0),
		z0 = svdup_lane (z1, 0))

/*
** dup_lane_7_u64:
**	dup	z0\.d, z0\.d\[7\]
**	ret
*/
TEST_UNIFORM_Z (dup_lane_7_u64, svuint64_t,
		z0 = svdup_lane_u64 (z0, 7),
		z0 = svdup_lane (z0, 7))
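
/* Indices 8 and above exceed the DUP (indexed) immediate range for .d
   elements, so the index is materialized with MOV and the lane is
   selected with TBL.  An SVE TBL index beyond the number of vector
   elements selects zero, so at short vector lengths these tests
   broadcast zero at run time.  */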
/*
** dup_lane_8_u64:
**	mov	(z[0-9]+\.d), #8
**	tbl	z0\.d, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_lane_8_u64, svuint64_t,
		z0 = svdup_lane_u64 (z0, 8),
		z0 = svdup_lane (z0, 8))

/*
** dup_lane_15_u64:
**	mov	(z[0-9]+\.d), #15
**	tbl	z0\.d, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_lane_15_u64, svuint64_t,
		z0 = svdup_lane_u64 (z0, 15),
		z0 = svdup_lane (z0, 15))

/*
** dup_lane_16_u64:
**	mov	(z[0-9]+\.d), #16
**	tbl	z0\.d, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_lane_16_u64, svuint64_t,
		z0 = svdup_lane_u64 (z0, 16),
		z0 = svdup_lane (z0, 16))

/*
** dup_lane_31_u64:
**	mov	(z[0-9]+\.d), #31
**	tbl	z0\.d, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_lane_31_u64, svuint64_t,
		z0 = svdup_lane_u64 (z0, 31),
		z0 = svdup_lane (z0, 31))

/*
** dup_lane_32_u64:
**	mov	(z[0-9]+\.d), #32
**	tbl	z0\.d, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_lane_32_u64, svuint64_t,
		z0 = svdup_lane_u64 (z0, 32),
		z0 = svdup_lane (z0, 32))

/*
** dup_lane_63_u64:
**	mov	(z[0-9]+\.d), #63
**	tbl	z0\.d, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_lane_63_u64, svuint64_t,
		z0 = svdup_lane_u64 (z0, 63),
		z0 = svdup_lane (z0, 63))

/*
** dup_lane_64_u64:
**	mov	(z[0-9]+\.d), #64
**	tbl	z0\.d, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_lane_64_u64, svuint64_t,
		z0 = svdup_lane_u64 (z0, 64),
		z0 = svdup_lane (z0, 64))

/*
** dup_lane_255_u64:
**	mov	(z[0-9]+\.d), #255
**	tbl	z0\.d, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_lane_255_u64, svuint64_t,
		z0 = svdup_lane_u64 (z0, 255),
		z0 = svdup_lane (z0, 255))