gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/qinch_pat_u16.c

/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

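/* Each test below checks that svqinch_pat_u16 (and the overloaded
   form svqinch_pat) compiles to a single UQINCH instruction, which
   saturating-increments every unsigned .h element of the vector by
   the element count of the given SVE pattern, scaled by the
   multiplier immediate (a constant in the range 1-16).  The expected
   assembly appears in the "**" comment above each TEST_UNIFORM_Z and
   is matched by check-function-bodies.  */
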
/*
** qinch_pat_1_u16_tied:
**	uqinch	z0\.h, pow2
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_1_u16_tied, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_POW2, 1),
		z0 = svqinch_pat (z0, SV_POW2, 1))

/*
** qinch_pat_1_u16_untied:
**	movprfx	z0, z1
**	uqinch	z0\.h, pow2
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_1_u16_untied, svuint16_t,
		z0 = svqinch_pat_u16 (z1, SV_POW2, 1),
		z0 = svqinch_pat (z1, SV_POW2, 1))
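/* The untied case reads z1 but writes z0.  UQINCH only has a
   destructive form (the destination is also a source), so the
   compiler copies z1 into z0 with MOVPRFX first.  */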

/*
** qinch_pat_2_u16:
**	uqinch	z0\.h, pow2, mul #2
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_2_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_POW2, 2),
		z0 = svqinch_pat (z0, SV_POW2, 2))

/*
** qinch_pat_7_u16:
**	uqinch	z0\.h, pow2, mul #7
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_7_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_POW2, 7),
		z0 = svqinch_pat (z0, SV_POW2, 7))

/*
** qinch_pat_15_u16:
**	uqinch	z0\.h, pow2, mul #15
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_15_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_POW2, 15),
		z0 = svqinch_pat (z0, SV_POW2, 15))

/*
** qinch_pat_16_u16:
**	uqinch	z0\.h, pow2, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_16_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_POW2, 16),
		z0 = svqinch_pat (z0, SV_POW2, 16))

/*
** qinch_pat_vl1_u16:
**	uqinch	z0\.h, vl1, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl1_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL1, 16),
		z0 = svqinch_pat (z0, SV_VL1, 16))

/*
** qinch_pat_vl2_u16:
**	uqinch	z0\.h, vl2, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl2_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL2, 16),
		z0 = svqinch_pat (z0, SV_VL2, 16))

/*
** qinch_pat_vl3_u16:
**	uqinch	z0\.h, vl3, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl3_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL3, 16),
		z0 = svqinch_pat (z0, SV_VL3, 16))

/*
** qinch_pat_vl4_u16:
**	uqinch	z0\.h, vl4, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl4_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL4, 16),
		z0 = svqinch_pat (z0, SV_VL4, 16))

/*
** qinch_pat_vl5_u16:
**	uqinch	z0\.h, vl5, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl5_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL5, 16),
		z0 = svqinch_pat (z0, SV_VL5, 16))

/*
** qinch_pat_vl6_u16:
**	uqinch	z0\.h, vl6, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl6_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL6, 16),
		z0 = svqinch_pat (z0, SV_VL6, 16))

/*
** qinch_pat_vl7_u16:
**	uqinch	z0\.h, vl7, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl7_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL7, 16),
		z0 = svqinch_pat (z0, SV_VL7, 16))

/*
** qinch_pat_vl8_u16:
**	uqinch	z0\.h, vl8, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl8_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL8, 16),
		z0 = svqinch_pat (z0, SV_VL8, 16))

/*
** qinch_pat_vl16_u16:
**	uqinch	z0\.h, vl16, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl16_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL16, 16),
		z0 = svqinch_pat (z0, SV_VL16, 16))

/*
** qinch_pat_vl32_u16:
**	uqinch	z0\.h, vl32, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl32_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL32, 16),
		z0 = svqinch_pat (z0, SV_VL32, 16))

/*
** qinch_pat_vl64_u16:
**	uqinch	z0\.h, vl64, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl64_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL64, 16),
		z0 = svqinch_pat (z0, SV_VL64, 16))

/*
** qinch_pat_vl128_u16:
**	uqinch	z0\.h, vl128, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl128_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL128, 16),
		z0 = svqinch_pat (z0, SV_VL128, 16))

/*
** qinch_pat_vl256_u16:
**	uqinch	z0\.h, vl256, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_vl256_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_VL256, 16),
		z0 = svqinch_pat (z0, SV_VL256, 16))

/*
** qinch_pat_mul4_u16:
**	uqinch	z0\.h, mul4, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_mul4_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_MUL4, 16),
		z0 = svqinch_pat (z0, SV_MUL4, 16))

/*
** qinch_pat_mul3_u16:
**	uqinch	z0\.h, mul3, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_mul3_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_MUL3, 16),
		z0 = svqinch_pat (z0, SV_MUL3, 16))

/*
** qinch_pat_all_u16:
**	uqinch	z0\.h, all, mul #16
**	ret
*/
TEST_UNIFORM_Z (qinch_pat_all_u16, svuint16_t,
		z0 = svqinch_pat_u16 (z0, SV_ALL, 16),
		z0 = svqinch_pat (z0, SV_ALL, 16))
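
/* A minimal usage sketch (hypothetical function, compiled only with
   -DUSAGE_SKETCH, so it does not affect the test): svqinch_pat acts
   as a saturating per-lane counter, clamping at 65535 instead of
   wrapping.  */
#ifdef USAGE_SKETCH
#include <arm_sve.h>
svuint16_t
count_iterations (svuint16_t counts, int n)
{
  /* Bump every .h lane by the full 16-bit element count once per
     iteration; lanes stick at 65535 once they saturate.  */
  for (int i = 0; i < n; ++i)
    counts = svqinch_pat (counts, SV_ALL, 1);
  return counts;
}
#endif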