gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/qincb_pat_u64.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
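
/* Each "**" comment below is an assembly template: the check-function-bodies
   directive above matches the compiled body of the named function against
   it.  TEST_UNIFORM_S (from test_sve_acle.h) wraps two equivalent calls,
   the explicitly suffixed intrinsic (svqincb_pat_n_u64) and its overloaded
   alias (svqincb_pat); the harness selects one form per compilation, so
   both spellings are checked against the same template.  */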

/*
** qincb_pat_n_1_u64_tied:
**	uqincb	x0, pow2
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_1_u64_tied, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_POW2, 1),
		x0 = svqincb_pat (x0, SV_POW2, 1))

/*
** qincb_pat_n_1_u64_untied:
**	mov	x0, x1
**	uqincb	x0, pow2
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_1_u64_untied, uint64_t,
		x0 = svqincb_pat_n_u64 (x1, SV_POW2, 1),
		x0 = svqincb_pat (x1, SV_POW2, 1))
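
/* The tied form reuses x0 as both input and result, so a single UQINCB
   suffices; the untied form takes its input in x1 and must first move it
   into the result register.  */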

/*
** qincb_pat_n_2_u64:
**	uqincb	x0, pow2, mul #2
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_2_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_POW2, 2),
		x0 = svqincb_pat (x0, SV_POW2, 2))

/*
** qincb_pat_n_7_u64:
**	uqincb	x0, pow2, mul #7
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_7_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_POW2, 7),
		x0 = svqincb_pat (x0, SV_POW2, 7))

/*
** qincb_pat_n_15_u64:
**	uqincb	x0, pow2, mul #15
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_15_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_POW2, 15),
		x0 = svqincb_pat (x0, SV_POW2, 15))

/*
** qincb_pat_n_16_u64:
**	uqincb	x0, pow2, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_16_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_POW2, 16),
		x0 = svqincb_pat (x0, SV_POW2, 16))
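
/* 16 is the largest immediate multiplier that UQINCB's "mul #imm" operand
   accepts (the encodable range is 1 to 16), so the remaining pattern tests
   all pin the multiplier at its maximum.  */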

/*
** qincb_pat_n_vl1_u64:
**	uqincb	x0, vl1, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl1_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL1, 16),
		x0 = svqincb_pat (x0, SV_VL1, 16))

/*
** qincb_pat_n_vl2_u64:
**	uqincb	x0, vl2, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl2_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL2, 16),
		x0 = svqincb_pat (x0, SV_VL2, 16))

/*
** qincb_pat_n_vl3_u64:
**	uqincb	x0, vl3, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl3_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL3, 16),
		x0 = svqincb_pat (x0, SV_VL3, 16))

/*
** qincb_pat_n_vl4_u64:
**	uqincb	x0, vl4, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl4_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL4, 16),
		x0 = svqincb_pat (x0, SV_VL4, 16))

/*
** qincb_pat_n_vl5_u64:
**	uqincb	x0, vl5, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl5_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL5, 16),
		x0 = svqincb_pat (x0, SV_VL5, 16))

/*
** qincb_pat_n_vl6_u64:
**	uqincb	x0, vl6, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl6_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL6, 16),
		x0 = svqincb_pat (x0, SV_VL6, 16))

/*
** qincb_pat_n_vl7_u64:
**	uqincb	x0, vl7, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl7_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL7, 16),
		x0 = svqincb_pat (x0, SV_VL7, 16))

/*
** qincb_pat_n_vl8_u64:
**	uqincb	x0, vl8, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl8_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL8, 16),
		x0 = svqincb_pat (x0, SV_VL8, 16))
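
/* SVE vector-length patterns are only encodable for VL1-VL8 and then
   powers of two up to VL256, hence the jump from vl8 to vl16 here.  */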

/*
** qincb_pat_n_vl16_u64:
**	uqincb	x0, vl16, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl16_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL16, 16),
		x0 = svqincb_pat (x0, SV_VL16, 16))

/*
** qincb_pat_n_vl32_u64:
**	uqincb	x0, vl32, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl32_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL32, 16),
		x0 = svqincb_pat (x0, SV_VL32, 16))

/*
** qincb_pat_n_vl64_u64:
**	uqincb	x0, vl64, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl64_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL64, 16),
		x0 = svqincb_pat (x0, SV_VL64, 16))

/*
** qincb_pat_n_vl128_u64:
**	uqincb	x0, vl128, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl128_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL128, 16),
		x0 = svqincb_pat (x0, SV_VL128, 16))

/*
** qincb_pat_n_vl256_u64:
**	uqincb	x0, vl256, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_vl256_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_VL256, 16),
		x0 = svqincb_pat (x0, SV_VL256, 16))

/*
** qincb_pat_n_mul4_u64:
**	uqincb	x0, mul4, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_mul4_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_MUL4, 16),
		x0 = svqincb_pat (x0, SV_MUL4, 16))

/*
** qincb_pat_n_mul3_u64:
**	uqincb	x0, mul3, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_mul3_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_MUL3, 16),
		x0 = svqincb_pat (x0, SV_MUL3, 16))

/*
** qincb_pat_n_all_u64:
**	uqincb	x0, all, mul #16
**	ret
*/
TEST_UNIFORM_S (qincb_pat_n_all_u64, uint64_t,
		x0 = svqincb_pat_n_u64 (x0, SV_ALL, 16),
		x0 = svqincb_pat (x0, SV_ALL, 16))
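
/* For reference, a minimal usage sketch (not part of the checked tests):
   svqincb_pat saturating-increments a scalar count by the number of byte
   elements matching a pattern, times an immediate factor in [1, 16],
   clamping at UINT64_MAX instead of wrapping.  The guard macro, function
   name, and loop below are hypothetical illustrations only, assuming an
   SVE-enabled compile (e.g. -march=armv8-a+sve).  */
#ifdef USAGE_SKETCH
#include <arm_sve.h>

uint64_t
accumulate_bytes (uint64_t total, uint64_t n_iters)
{
  for (uint64_t i = 0; i < n_iters; ++i)
    /* Add 2 * (largest power-of-two count of byte elements per vector),
       saturating rather than overflowing.  */
    total = svqincb_pat (total, SV_POW2, 2);
  return total;
}
#endif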