gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/qincd_pat_u64.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

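/* Check that the 64-bit unsigned saturating "increment by pattern"
   intrinsics compile to a single UQINCD instruction with the expected
   pattern operand and "mul" multiplier (plus a MOVPRFX or MOV for the
   untied forms).  */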
/*
** qincd_pat_1_u64_tied:
**	uqincd	z0\.d, pow2
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_1_u64_tied, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_POW2, 1),
		z0 = svqincd_pat (z0, SV_POW2, 1))

/*
** qincd_pat_1_u64_untied:
**	movprfx	z0, z1
**	uqincd	z0\.d, pow2
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_1_u64_untied, svuint64_t,
		z0 = svqincd_pat_u64 (z1, SV_POW2, 1),
		z0 = svqincd_pat (z1, SV_POW2, 1))

/*
** qincd_pat_2_u64:
**	uqincd	z0\.d, pow2, mul #2
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_2_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_POW2, 2),
		z0 = svqincd_pat (z0, SV_POW2, 2))

/*
** qincd_pat_7_u64:
**	uqincd	z0\.d, pow2, mul #7
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_7_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_POW2, 7),
		z0 = svqincd_pat (z0, SV_POW2, 7))

/*
** qincd_pat_15_u64:
**	uqincd	z0\.d, pow2, mul #15
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_15_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_POW2, 15),
		z0 = svqincd_pat (z0, SV_POW2, 15))

/*
** qincd_pat_16_u64:
**	uqincd	z0\.d, pow2, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_16_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_POW2, 16),
		z0 = svqincd_pat (z0, SV_POW2, 16))

/*
** qincd_pat_vl1_u64:
**	uqincd	z0\.d, vl1, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl1_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL1, 16),
		z0 = svqincd_pat (z0, SV_VL1, 16))

/*
** qincd_pat_vl2_u64:
**	uqincd	z0\.d, vl2, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl2_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL2, 16),
		z0 = svqincd_pat (z0, SV_VL2, 16))

/*
** qincd_pat_vl3_u64:
**	uqincd	z0\.d, vl3, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl3_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL3, 16),
		z0 = svqincd_pat (z0, SV_VL3, 16))

/*
** qincd_pat_vl4_u64:
**	uqincd	z0\.d, vl4, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl4_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL4, 16),
		z0 = svqincd_pat (z0, SV_VL4, 16))

/*
** qincd_pat_vl5_u64:
**	uqincd	z0\.d, vl5, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl5_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL5, 16),
		z0 = svqincd_pat (z0, SV_VL5, 16))

/*
** qincd_pat_vl6_u64:
**	uqincd	z0\.d, vl6, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl6_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL6, 16),
		z0 = svqincd_pat (z0, SV_VL6, 16))

/*
** qincd_pat_vl7_u64:
**	uqincd	z0\.d, vl7, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl7_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL7, 16),
		z0 = svqincd_pat (z0, SV_VL7, 16))

/*
** qincd_pat_vl8_u64:
**	uqincd	z0\.d, vl8, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl8_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL8, 16),
		z0 = svqincd_pat (z0, SV_VL8, 16))

/*
** qincd_pat_vl16_u64:
**	uqincd	z0\.d, vl16, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl16_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL16, 16),
		z0 = svqincd_pat (z0, SV_VL16, 16))

/*
** qincd_pat_vl32_u64:
**	uqincd	z0\.d, vl32, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl32_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL32, 16),
		z0 = svqincd_pat (z0, SV_VL32, 16))

/*
** qincd_pat_vl64_u64:
**	uqincd	z0\.d, vl64, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl64_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL64, 16),
		z0 = svqincd_pat (z0, SV_VL64, 16))

/*
** qincd_pat_vl128_u64:
**	uqincd	z0\.d, vl128, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl128_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL128, 16),
		z0 = svqincd_pat (z0, SV_VL128, 16))

/*
** qincd_pat_vl256_u64:
**	uqincd	z0\.d, vl256, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_vl256_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_VL256, 16),
		z0 = svqincd_pat (z0, SV_VL256, 16))

/*
** qincd_pat_mul4_u64:
**	uqincd	z0\.d, mul4, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_mul4_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_MUL4, 16),
		z0 = svqincd_pat (z0, SV_MUL4, 16))

/*
** qincd_pat_mul3_u64:
**	uqincd	z0\.d, mul3, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_mul3_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_MUL3, 16),
		z0 = svqincd_pat (z0, SV_MUL3, 16))

/*
** qincd_pat_all_u64:
**	uqincd	z0\.d, all, mul #16
**	ret
*/
TEST_UNIFORM_Z (qincd_pat_all_u64, svuint64_t,
		z0 = svqincd_pat_u64 (z0, SV_ALL, 16),
		z0 = svqincd_pat (z0, SV_ALL, 16))

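/* The _n forms below operate on a scalar uint64_t in x0 rather than on
   an SVE vector.  */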
/*
** qincd_pat_n_1_u64_tied:
**	uqincd	x0, pow2
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_1_u64_tied, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_POW2, 1),
		x0 = svqincd_pat (x0, SV_POW2, 1))

/*
** qincd_pat_n_1_u64_untied:
**	mov	x0, x1
**	uqincd	x0, pow2
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_1_u64_untied, uint64_t,
		x0 = svqincd_pat_n_u64 (x1, SV_POW2, 1),
		x0 = svqincd_pat (x1, SV_POW2, 1))

/*
** qincd_pat_n_2_u64:
**	uqincd	x0, pow2, mul #2
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_2_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_POW2, 2),
		x0 = svqincd_pat (x0, SV_POW2, 2))

/*
** qincd_pat_n_7_u64:
**	uqincd	x0, pow2, mul #7
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_7_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_POW2, 7),
		x0 = svqincd_pat (x0, SV_POW2, 7))

/*
** qincd_pat_n_15_u64:
**	uqincd	x0, pow2, mul #15
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_15_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_POW2, 15),
		x0 = svqincd_pat (x0, SV_POW2, 15))

/*
** qincd_pat_n_16_u64:
**	uqincd	x0, pow2, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_16_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_POW2, 16),
		x0 = svqincd_pat (x0, SV_POW2, 16))

/*
** qincd_pat_n_vl1_u64:
**	uqincd	x0, vl1, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl1_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL1, 16),
		x0 = svqincd_pat (x0, SV_VL1, 16))

/*
** qincd_pat_n_vl2_u64:
**	uqincd	x0, vl2, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl2_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL2, 16),
		x0 = svqincd_pat (x0, SV_VL2, 16))

/*
** qincd_pat_n_vl3_u64:
**	uqincd	x0, vl3, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl3_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL3, 16),
		x0 = svqincd_pat (x0, SV_VL3, 16))

/*
** qincd_pat_n_vl4_u64:
**	uqincd	x0, vl4, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl4_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL4, 16),
		x0 = svqincd_pat (x0, SV_VL4, 16))

/*
** qincd_pat_n_vl5_u64:
**	uqincd	x0, vl5, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl5_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL5, 16),
		x0 = svqincd_pat (x0, SV_VL5, 16))

/*
** qincd_pat_n_vl6_u64:
**	uqincd	x0, vl6, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl6_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL6, 16),
		x0 = svqincd_pat (x0, SV_VL6, 16))

/*
** qincd_pat_n_vl7_u64:
**	uqincd	x0, vl7, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl7_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL7, 16),
		x0 = svqincd_pat (x0, SV_VL7, 16))

/*
** qincd_pat_n_vl8_u64:
**	uqincd	x0, vl8, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl8_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL8, 16),
		x0 = svqincd_pat (x0, SV_VL8, 16))

/*
** qincd_pat_n_vl16_u64:
**	uqincd	x0, vl16, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl16_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL16, 16),
		x0 = svqincd_pat (x0, SV_VL16, 16))

/*
** qincd_pat_n_vl32_u64:
**	uqincd	x0, vl32, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl32_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL32, 16),
		x0 = svqincd_pat (x0, SV_VL32, 16))

/*
** qincd_pat_n_vl64_u64:
**	uqincd	x0, vl64, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl64_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL64, 16),
		x0 = svqincd_pat (x0, SV_VL64, 16))

/*
** qincd_pat_n_vl128_u64:
**	uqincd	x0, vl128, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl128_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL128, 16),
		x0 = svqincd_pat (x0, SV_VL128, 16))

/*
** qincd_pat_n_vl256_u64:
**	uqincd	x0, vl256, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_vl256_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_VL256, 16),
		x0 = svqincd_pat (x0, SV_VL256, 16))

/*
** qincd_pat_n_mul4_u64:
**	uqincd	x0, mul4, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_mul4_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_MUL4, 16),
		x0 = svqincd_pat (x0, SV_MUL4, 16))

/*
** qincd_pat_n_mul3_u64:
**	uqincd	x0, mul3, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_mul3_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_MUL3, 16),
		x0 = svqincd_pat (x0, SV_MUL3, 16))

/*
** qincd_pat_n_all_u64:
**	uqincd	x0, all, mul #16
**	ret
*/
TEST_UNIFORM_S (qincd_pat_n_all_u64, uint64_t,
		x0 = svqincd_pat_n_u64 (x0, SV_ALL, 16),
		x0 = svqincd_pat (x0, SV_ALL, 16))