/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

       5  /*
       6  ** dup_lane_x0_f64_tied1:
       7  **	mov	(z[0-9]+\.d), x0
       8  **	tbl	z0\.d, z0\.d, \1
       9  **	ret
      10  */
      11  TEST_UNIFORM_ZX (dup_lane_x0_f64_tied1, svfloat64_t, uint64_t,
      12  		 z0 = svdup_lane_f64 (z0, x0),
      13  		 z0 = svdup_lane (z0, x0))
      14  
      15  /*
      16  ** dup_lane_x0_f64_untied:
      17  **	mov	(z[0-9]+\.d), x0
      18  **	tbl	z0\.d, z1\.d, \1
      19  **	ret
      20  */
      21  TEST_UNIFORM_ZX (dup_lane_x0_f64_untied, svfloat64_t, uint64_t,
      22  		 z0 = svdup_lane_f64 (z1, x0),
      23  		 z0 = svdup_lane (z1, x0))
      24  
      25  /*
      26  ** dup_lane_0_f64_tied1:
      27  **	dup	z0\.d, z0\.d\[0\]
      28  **	ret
      29  */
      30  TEST_UNIFORM_Z (dup_lane_0_f64_tied1, svfloat64_t,
      31  		z0 = svdup_lane_f64 (z0, 0),
      32  		z0 = svdup_lane (z0, 0))
      33  
      34  /*
      35  ** dup_lane_0_f64_untied:
      36  **	dup	z0\.d, z1\.d\[0\]
      37  **	ret
      38  */
      39  TEST_UNIFORM_Z (dup_lane_0_f64_untied, svfloat64_t,
      40  		z0 = svdup_lane_f64 (z1, 0),
      41  		z0 = svdup_lane (z1, 0))
      42  
      43  /*
      44  ** dup_lane_15_f64:
      45  **	mov	(z[0-9]+\.d), #15
      46  **	tbl	z0\.d, z0\.d, \1
      47  **	ret
      48  */
      49  TEST_UNIFORM_Z (dup_lane_15_f64, svfloat64_t,
      50  		z0 = svdup_lane_f64 (z0, 15),
      51  		z0 = svdup_lane (z0, 15))
      52  
      53  /*
      54  ** dup_lane_16_f64:
      55  **	mov	(z[0-9]+\.d), #16
      56  **	tbl	z0\.d, z0\.d, \1
      57  **	ret
      58  */
      59  TEST_UNIFORM_Z (dup_lane_16_f64, svfloat64_t,
      60  		z0 = svdup_lane_f64 (z0, 16),
      61  		z0 = svdup_lane (z0, 16))
      62  
      63  /*
      64  ** dup_lane_31_f64:
      65  **	mov	(z[0-9]+\.d), #31
      66  **	tbl	z0\.d, z0\.d, \1
      67  **	ret
      68  */
      69  TEST_UNIFORM_Z (dup_lane_31_f64, svfloat64_t,
      70  		z0 = svdup_lane_f64 (z0, 31),
      71  		z0 = svdup_lane (z0, 31))
      72  
      73  /*
      74  ** dup_lane_32_f64:
      75  **	mov	(z[0-9]+\.d), #32
      76  **	tbl	z0\.d, z0\.d, \1
      77  **	ret
      78  */
      79  TEST_UNIFORM_Z (dup_lane_32_f64, svfloat64_t,
      80  		z0 = svdup_lane_f64 (z0, 32),
      81  		z0 = svdup_lane (z0, 32))
      82  
      83  /*
      84  ** dup_lane_63_f64:
      85  **	mov	(z[0-9]+\.d), #63
      86  **	tbl	z0\.d, z0\.d, \1
      87  **	ret
      88  */
      89  TEST_UNIFORM_Z (dup_lane_63_f64, svfloat64_t,
      90  		z0 = svdup_lane_f64 (z0, 63),
      91  		z0 = svdup_lane (z0, 63))
      92  
      93  /*
      94  ** dup_lane_64_f64:
      95  **	mov	(z[0-9]+\.d), #64
      96  **	tbl	z0\.d, z0\.d, \1
      97  **	ret
      98  */
      99  TEST_UNIFORM_Z (dup_lane_64_f64, svfloat64_t,
     100  		z0 = svdup_lane_f64 (z0, 64),
     101  		z0 = svdup_lane (z0, 64))
     102  
     103  /*
     104  ** dup_lane_255_f64:
     105  **	mov	(z[0-9]+\.d), #255
     106  **	tbl	z0\.d, z0\.d, \1
     107  **	ret
     108  */
     109  TEST_UNIFORM_Z (dup_lane_255_f64, svfloat64_t,
     110  		z0 = svdup_lane_f64 (z0, 255),
     111  		z0 = svdup_lane (z0, 255))