gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/lsl_u16.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

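/* Editorial note: each TEST_UNIFORM_Z / TEST_UNIFORM_ZX invocation below
   defines one function, and check-function-bodies matches its assembly
   against the preceding "**" pattern.  The _m, _z and _x groups cover
   merging, zeroing and "don't care" predication; tied1/tied2/untied
   indicate whether the result register is shared with the first operand,
   the second operand, or neither.  */
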
/*
** lsl_u16_m_tied1:
**	lsl	z0\.h, p0/m, z0\.h, z1\.h
**	ret
*/
TEST_UNIFORM_Z (lsl_u16_m_tied1, svuint16_t,
		z0 = svlsl_u16_m (p0, z0, z1),
		z0 = svlsl_m (p0, z0, z1))

/*
** lsl_u16_m_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	lsl	z0\.h, p0/m, z0\.h, \1\.h
**	ret
*/
TEST_UNIFORM_Z (lsl_u16_m_tied2, svuint16_t,
		z0 = svlsl_u16_m (p0, z1, z0),
		z0 = svlsl_m (p0, z1, z0))

/*
** lsl_u16_m_untied:
**	movprfx	z0, z1
**	lsl	z0\.h, p0/m, z0\.h, z2\.h
**	ret
*/
TEST_UNIFORM_Z (lsl_u16_m_untied, svuint16_t,
		z0 = svlsl_u16_m (p0, z1, z2),
		z0 = svlsl_m (p0, z1, z2))

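/* Editorial note: for shift counts passed in a scalar register, the
   expected code broadcasts the count into a vector register with MOV
   before the predicated LSL.  */
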
/*
** lsl_w0_u16_m_tied1:
**	mov	(z[0-9]+\.h), w0
**	lsl	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_ZX (lsl_w0_u16_m_tied1, svuint16_t, uint16_t,
		 z0 = svlsl_n_u16_m (p0, z0, x0),
		 z0 = svlsl_m (p0, z0, x0))

/*
** lsl_w0_u16_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.h), w0
**	movprfx	z0, z1
**	lsl	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_ZX (lsl_w0_u16_m_untied, svuint16_t, uint16_t,
		 z0 = svlsl_n_u16_m (p0, z1, x0),
		 z0 = svlsl_m (p0, z1, x0))

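/* Editorial note: shift counts 1 and 15 fit the LSL immediate range for
   .h elements (0-15), so they are expected to be encoded directly in the
   instruction.  */
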
/*
** lsl_1_u16_m_tied1:
**	lsl	z0\.h, p0/m, z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (lsl_1_u16_m_tied1, svuint16_t,
		z0 = svlsl_n_u16_m (p0, z0, 1),
		z0 = svlsl_m (p0, z0, 1))

/*
** lsl_1_u16_m_untied:
**	movprfx	z0, z1
**	lsl	z0\.h, p0/m, z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (lsl_1_u16_m_untied, svuint16_t,
		z0 = svlsl_n_u16_m (p0, z1, 1),
		z0 = svlsl_m (p0, z1, 1))

/*
** lsl_15_u16_m_tied1:
**	lsl	z0\.h, p0/m, z0\.h, #15
**	ret
*/
TEST_UNIFORM_Z (lsl_15_u16_m_tied1, svuint16_t,
		z0 = svlsl_n_u16_m (p0, z0, 15),
		z0 = svlsl_m (p0, z0, 15))

/*
** lsl_15_u16_m_untied:
**	movprfx	z0, z1
**	lsl	z0\.h, p0/m, z0\.h, #15
**	ret
*/
TEST_UNIFORM_Z (lsl_15_u16_m_untied, svuint16_t,
		z0 = svlsl_n_u16_m (p0, z1, 15),
		z0 = svlsl_m (p0, z1, 15))

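/* Editorial note: a shift count of 16 is outside the .h immediate range,
   so the expected code materializes the count in a vector register
   first.  */
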
/*
** lsl_16_u16_m_tied1:
**	mov	(z[0-9]+\.h), #16
**	lsl	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (lsl_16_u16_m_tied1, svuint16_t,
		z0 = svlsl_n_u16_m (p0, z0, 16),
		z0 = svlsl_m (p0, z0, 16))

/*
** lsl_16_u16_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.h), #16
**	movprfx	z0, z1
**	lsl	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (lsl_16_u16_m_untied, svuint16_t,
		z0 = svlsl_n_u16_m (p0, z1, 16),
		z0 = svlsl_m (p0, z1, 16))

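/* Editorial note: the zeroing (_z) forms clear inactive elements, hence
   the zeroing MOVPRFX before the shift.  LSLR is the reversed-operand
   form, used when the destination is tied to the shift-count operand.  */
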
/*
** lsl_u16_z_tied1:
**	movprfx	z0\.h, p0/z, z0\.h
**	lsl	z0\.h, p0/m, z0\.h, z1\.h
**	ret
*/
TEST_UNIFORM_Z (lsl_u16_z_tied1, svuint16_t,
		z0 = svlsl_u16_z (p0, z0, z1),
		z0 = svlsl_z (p0, z0, z1))

/*
** lsl_u16_z_tied2:
**	movprfx	z0\.h, p0/z, z0\.h
**	lslr	z0\.h, p0/m, z0\.h, z1\.h
**	ret
*/
TEST_UNIFORM_Z (lsl_u16_z_tied2, svuint16_t,
		z0 = svlsl_u16_z (p0, z1, z0),
		z0 = svlsl_z (p0, z1, z0))

/*
** lsl_u16_z_untied:
** (
**	movprfx	z0\.h, p0/z, z1\.h
**	lsl	z0\.h, p0/m, z0\.h, z2\.h
** |
**	movprfx	z0\.h, p0/z, z2\.h
**	lslr	z0\.h, p0/m, z0\.h, z1\.h
** )
**	ret
*/
TEST_UNIFORM_Z (lsl_u16_z_untied, svuint16_t,
		z0 = svlsl_u16_z (p0, z1, z2),
		z0 = svlsl_z (p0, z1, z2))

/*
** lsl_w0_u16_z_tied1:
**	mov	(z[0-9]+\.h), w0
**	movprfx	z0\.h, p0/z, z0\.h
**	lsl	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_ZX (lsl_w0_u16_z_tied1, svuint16_t, uint16_t,
		 z0 = svlsl_n_u16_z (p0, z0, x0),
		 z0 = svlsl_z (p0, z0, x0))

/*
** lsl_w0_u16_z_untied:
**	mov	(z[0-9]+\.h), w0
** (
**	movprfx	z0\.h, p0/z, z1\.h
**	lsl	z0\.h, p0/m, z0\.h, \1
** |
**	movprfx	z0\.h, p0/z, \1
**	lslr	z0\.h, p0/m, z0\.h, z1\.h
** )
**	ret
*/
TEST_UNIFORM_ZX (lsl_w0_u16_z_untied, svuint16_t, uint16_t,
		 z0 = svlsl_n_u16_z (p0, z1, x0),
		 z0 = svlsl_z (p0, z1, x0))

/*
** lsl_1_u16_z_tied1:
**	movprfx	z0\.h, p0/z, z0\.h
**	lsl	z0\.h, p0/m, z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (lsl_1_u16_z_tied1, svuint16_t,
		z0 = svlsl_n_u16_z (p0, z0, 1),
		z0 = svlsl_z (p0, z0, 1))

/*
** lsl_1_u16_z_untied:
**	movprfx	z0\.h, p0/z, z1\.h
**	lsl	z0\.h, p0/m, z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (lsl_1_u16_z_untied, svuint16_t,
		z0 = svlsl_n_u16_z (p0, z1, 1),
		z0 = svlsl_z (p0, z1, 1))

/*
** lsl_15_u16_z_tied1:
**	movprfx	z0\.h, p0/z, z0\.h
**	lsl	z0\.h, p0/m, z0\.h, #15
**	ret
*/
TEST_UNIFORM_Z (lsl_15_u16_z_tied1, svuint16_t,
		z0 = svlsl_n_u16_z (p0, z0, 15),
		z0 = svlsl_z (p0, z0, 15))

/*
** lsl_15_u16_z_untied:
**	movprfx	z0\.h, p0/z, z1\.h
**	lsl	z0\.h, p0/m, z0\.h, #15
**	ret
*/
TEST_UNIFORM_Z (lsl_15_u16_z_untied, svuint16_t,
		z0 = svlsl_n_u16_z (p0, z1, 15),
		z0 = svlsl_z (p0, z1, 15))

/*
** lsl_16_u16_z_tied1:
**	mov	(z[0-9]+\.h), #16
**	movprfx	z0\.h, p0/z, z0\.h
**	lsl	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (lsl_16_u16_z_tied1, svuint16_t,
		z0 = svlsl_n_u16_z (p0, z0, 16),
		z0 = svlsl_z (p0, z0, 16))

/*
** lsl_16_u16_z_untied:
**	mov	(z[0-9]+\.h), #16
** (
**	movprfx	z0\.h, p0/z, z1\.h
**	lsl	z0\.h, p0/m, z0\.h, \1
** |
**	movprfx	z0\.h, p0/z, \1
**	lslr	z0\.h, p0/m, z0\.h, z1\.h
** )
**	ret
*/
TEST_UNIFORM_Z (lsl_16_u16_z_untied, svuint16_t,
		z0 = svlsl_n_u16_z (p0, z1, 16),
		z0 = svlsl_z (p0, z1, 16))

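/* Editorial note: the "don't care" (_x) forms leave inactive elements
   unspecified, so in-range immediate shifts can use the unpredicated
   LSL (immediate) encoding and no MOVPRFX is needed for the tied cases.  */
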
/*
** lsl_u16_x_tied1:
**	lsl	z0\.h, p0/m, z0\.h, z1\.h
**	ret
*/
TEST_UNIFORM_Z (lsl_u16_x_tied1, svuint16_t,
		z0 = svlsl_u16_x (p0, z0, z1),
		z0 = svlsl_x (p0, z0, z1))

/*
** lsl_u16_x_tied2:
**	lslr	z0\.h, p0/m, z0\.h, z1\.h
**	ret
*/
TEST_UNIFORM_Z (lsl_u16_x_tied2, svuint16_t,
		z0 = svlsl_u16_x (p0, z1, z0),
		z0 = svlsl_x (p0, z1, z0))

/*
** lsl_u16_x_untied:
** (
**	movprfx	z0, z1
**	lsl	z0\.h, p0/m, z0\.h, z2\.h
** |
**	movprfx	z0, z2
**	lslr	z0\.h, p0/m, z0\.h, z1\.h
** )
**	ret
*/
TEST_UNIFORM_Z (lsl_u16_x_untied, svuint16_t,
		z0 = svlsl_u16_x (p0, z1, z2),
		z0 = svlsl_x (p0, z1, z2))

/*
** lsl_w0_u16_x_tied1:
**	mov	(z[0-9]+\.h), w0
**	lsl	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_ZX (lsl_w0_u16_x_tied1, svuint16_t, uint16_t,
		 z0 = svlsl_n_u16_x (p0, z0, x0),
		 z0 = svlsl_x (p0, z0, x0))

/*
** lsl_w0_u16_x_untied:
**	mov	z0\.h, w0
**	lslr	z0\.h, p0/m, z0\.h, z1\.h
**	ret
*/
TEST_UNIFORM_ZX (lsl_w0_u16_x_untied, svuint16_t, uint16_t,
		 z0 = svlsl_n_u16_x (p0, z1, x0),
		 z0 = svlsl_x (p0, z1, x0))

/*
** lsl_1_u16_x_tied1:
**	lsl	z0\.h, z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (lsl_1_u16_x_tied1, svuint16_t,
		z0 = svlsl_n_u16_x (p0, z0, 1),
		z0 = svlsl_x (p0, z0, 1))

/*
** lsl_1_u16_x_untied:
**	lsl	z0\.h, z1\.h, #1
**	ret
*/
TEST_UNIFORM_Z (lsl_1_u16_x_untied, svuint16_t,
		z0 = svlsl_n_u16_x (p0, z1, 1),
		z0 = svlsl_x (p0, z1, 1))

/*
** lsl_15_u16_x_tied1:
**	lsl	z0\.h, z0\.h, #15
**	ret
*/
TEST_UNIFORM_Z (lsl_15_u16_x_tied1, svuint16_t,
		z0 = svlsl_n_u16_x (p0, z0, 15),
		z0 = svlsl_x (p0, z0, 15))

/*
** lsl_15_u16_x_untied:
**	lsl	z0\.h, z1\.h, #15
**	ret
*/
TEST_UNIFORM_Z (lsl_15_u16_x_untied, svuint16_t,
		z0 = svlsl_n_u16_x (p0, z1, 15),
		z0 = svlsl_x (p0, z1, 15))

/*
** lsl_16_u16_x_tied1:
**	mov	(z[0-9]+\.h), #16
**	lsl	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (lsl_16_u16_x_tied1, svuint16_t,
		z0 = svlsl_n_u16_x (p0, z0, 16),
		z0 = svlsl_x (p0, z0, 16))

/*
** lsl_16_u16_x_untied:
**	mov	z0\.h, #16
**	lslr	z0\.h, p0/m, z0\.h, z1\.h
**	ret
*/
TEST_UNIFORM_Z (lsl_16_u16_x_untied, svuint16_t,
		z0 = svlsl_n_u16_x (p0, z1, 16),
		z0 = svlsl_x (p0, z1, 16))