/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

/* Tied form: input and output are both z0, so a single UQINCD (unsigned
   saturating increment by the number of 64-bit elements) updates the
   register in place -- no MOVPRFX expected.  */
/*
** qincd_1_u64_tied:
**	uqincd	z0\.d
**	ret
*/
TEST_UNIFORM_Z (qincd_1_u64_tied, svuint64_t,
		z0 = svqincd_u64 (z0, 1),
		z0 = svqincd (z0, 1))
      13  
/* Untied form: the source is z1, so the compiler must first move it into
   the destination (via MOVPRFX) before the destructive UQINCD.  */
/*
** qincd_1_u64_untied:
**	movprfx	z0, z1
**	uqincd	z0\.d
**	ret
*/
TEST_UNIFORM_Z (qincd_1_u64_untied, svuint64_t,
		z0 = svqincd_u64 (z1, 1),
		z0 = svqincd (z1, 1))
      23  
/* A constant multiplier of 2 should fold into the instruction's
   "all, mul #2" operand rather than needing separate arithmetic.  */
/*
** qincd_2_u64:
**	uqincd	z0\.d, all, mul #2
**	ret
*/
TEST_UNIFORM_Z (qincd_2_u64, svuint64_t,
		z0 = svqincd_u64 (z0, 2),
		z0 = svqincd (z0, 2))
      32  
/* Multiplier 7 folds into the immediate "mul #7" form.  */
/*
** qincd_7_u64:
**	uqincd	z0\.d, all, mul #7
**	ret
*/
TEST_UNIFORM_Z (qincd_7_u64, svuint64_t,
		z0 = svqincd_u64 (z0, 7),
		z0 = svqincd (z0, 7))
      41  
/* Multiplier 15 folds into the immediate "mul #15" form.  */
/*
** qincd_15_u64:
**	uqincd	z0\.d, all, mul #15
**	ret
*/
TEST_UNIFORM_Z (qincd_15_u64, svuint64_t,
		z0 = svqincd_u64 (z0, 15),
		z0 = svqincd (z0, 15))
      50  
/* Multiplier 16 is the top of the instruction's immediate range and
   should still be encoded directly as "mul #16".  */
/*
** qincd_16_u64:
**	uqincd	z0\.d, all, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_16_u64, svuint64_t,
		z0 = svqincd_u64 (z0, 16),
		z0 = svqincd (z0, 16))
      59  
/* Scalar (_n) tied form: the increment operates directly on the GPR x0,
   so a bare UQINCD on x0 is expected.  */
/*
** qincd_n_1_u64_tied:
**	uqincd	x0
**	ret
*/
TEST_UNIFORM_S (qincd_n_1_u64_tied, uint64_t,
		x0 = svqincd_n_u64 (x0, 1),
		x0 = svqincd (x0, 1))
      68  
/* Scalar (_n) untied form: x1 is copied to x0 with a plain MOV before
   the destructive UQINCD (no MOVPRFX for GPRs).  */
/*
** qincd_n_1_u64_untied:
**	mov	x0, x1
**	uqincd	x0
**	ret
*/
TEST_UNIFORM_S (qincd_n_1_u64_untied, uint64_t,
		x0 = svqincd_n_u64 (x1, 1),
		x0 = svqincd (x1, 1))
      78  
/* Scalar form with multiplier 2 folded into "all, mul #2".  */
/*
** qincd_n_2_u64:
**	uqincd	x0, all, mul #2
**	ret
*/
TEST_UNIFORM_S (qincd_n_2_u64, uint64_t,
		x0 = svqincd_n_u64 (x0, 2),
		x0 = svqincd (x0, 2))
      87  
/* Scalar form with multiplier 7 folded into "all, mul #7".  */
/*
** qincd_n_7_u64:
**	uqincd	x0, all, mul #7
**	ret
*/
TEST_UNIFORM_S (qincd_n_7_u64, uint64_t,
		x0 = svqincd_n_u64 (x0, 7),
		x0 = svqincd (x0, 7))
      96  
/* Scalar form with multiplier 15 folded into "all, mul #15".  */
/*
** qincd_n_15_u64:
**	uqincd	x0, all, mul #15
**	ret
*/
TEST_UNIFORM_S (qincd_n_15_u64, uint64_t,
		x0 = svqincd_n_u64 (x0, 15),
		x0 = svqincd (x0, 15))
     105  
/* Scalar form with multiplier 16 (the maximum immediate) folded into
   "all, mul #16".  */
/*
** qincd_n_16_u64:
**	uqincd	x0, all, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_n_16_u64, uint64_t,
		x0 = svqincd_n_u64 (x0, 16),
		x0 = svqincd (x0, 16))