/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
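
/* Each TEST_UNIFORM_Z / TEST_UNIFORM_ZX invocation below defines a function
   whose generated body must match the preceding "**" assembly template,
   as verified by the check-function-bodies directive above.  The two
   statements exercise the explicitly suffixed intrinsic and its overloaded
   form.  */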

/*
** lsr_u64_m_tied1:
**	lsr	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (lsr_u64_m_tied1, svuint64_t,
		z0 = svlsr_u64_m (p0, z0, z1),
		z0 = svlsr_m (p0, z0, z1))

/*
** lsr_u64_m_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	lsr	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (lsr_u64_m_tied2, svuint64_t,
		z0 = svlsr_u64_m (p0, z1, z0),
		z0 = svlsr_m (p0, z1, z0))

/*
** lsr_u64_m_untied:
**	movprfx	z0, z1
**	lsr	z0\.d, p0/m, z0\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (lsr_u64_m_untied, svuint64_t,
		z0 = svlsr_u64_m (p0, z1, z2),
		z0 = svlsr_m (p0, z1, z2))

/*
** lsr_x0_u64_m_tied1:
**	mov	(z[0-9]+\.d), x0
**	lsr	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (lsr_x0_u64_m_tied1, svuint64_t, uint64_t,
		 z0 = svlsr_n_u64_m (p0, z0, x0),
		 z0 = svlsr_m (p0, z0, x0))

/*
** lsr_x0_u64_m_untied:
**	mov	(z[0-9]+\.d), x0
**	movprfx	z0, z1
**	lsr	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (lsr_x0_u64_m_untied, svuint64_t, uint64_t,
		 z0 = svlsr_n_u64_m (p0, z1, x0),
		 z0 = svlsr_m (p0, z1, x0))

/*
** lsr_1_u64_m_tied1:
**	lsr	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (lsr_1_u64_m_tied1, svuint64_t,
		z0 = svlsr_n_u64_m (p0, z0, 1),
		z0 = svlsr_m (p0, z0, 1))

/*
** lsr_1_u64_m_untied:
**	movprfx	z0, z1
**	lsr	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (lsr_1_u64_m_untied, svuint64_t,
		z0 = svlsr_n_u64_m (p0, z1, 1),
		z0 = svlsr_m (p0, z1, 1))

/*
** lsr_63_u64_m_tied1:
**	lsr	z0\.d, p0/m, z0\.d, #63
**	ret
*/
TEST_UNIFORM_Z (lsr_63_u64_m_tied1, svuint64_t,
		z0 = svlsr_n_u64_m (p0, z0, 63),
		z0 = svlsr_m (p0, z0, 63))

/*
** lsr_63_u64_m_untied:
**	movprfx	z0, z1
**	lsr	z0\.d, p0/m, z0\.d, #63
**	ret
*/
TEST_UNIFORM_Z (lsr_63_u64_m_untied, svuint64_t,
		z0 = svlsr_n_u64_m (p0, z1, 63),
		z0 = svlsr_m (p0, z1, 63))

/*
** lsr_64_u64_m_tied1:
**	lsr	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (lsr_64_u64_m_tied1, svuint64_t,
		z0 = svlsr_n_u64_m (p0, z0, 64),
		z0 = svlsr_m (p0, z0, 64))

/*
** lsr_64_u64_m_untied:
**	movprfx	z0, z1
**	lsr	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (lsr_64_u64_m_untied, svuint64_t,
		z0 = svlsr_n_u64_m (p0, z1, 64),
		z0 = svlsr_m (p0, z1, 64))

/*
** lsr_u64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	lsr	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (lsr_u64_z_tied1, svuint64_t,
		z0 = svlsr_u64_z (p0, z0, z1),
		z0 = svlsr_z (p0, z0, z1))

/*
** lsr_u64_z_tied2:
**	movprfx	z0\.d, p0/z, z0\.d
**	lsrr	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (lsr_u64_z_tied2, svuint64_t,
		z0 = svlsr_u64_z (p0, z1, z0),
		z0 = svlsr_z (p0, z1, z0))

/*
** lsr_u64_z_untied:
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	lsr	z0\.d, p0/m, z0\.d, z2\.d
** |
**	movprfx	z0\.d, p0/z, z2\.d
**	lsrr	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_Z (lsr_u64_z_untied, svuint64_t,
		z0 = svlsr_u64_z (p0, z1, z2),
		z0 = svlsr_z (p0, z1, z2))

/*
** lsr_x0_u64_z_tied1:
**	mov	(z[0-9]+\.d), x0
**	movprfx	z0\.d, p0/z, z0\.d
**	lsr	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (lsr_x0_u64_z_tied1, svuint64_t, uint64_t,
		 z0 = svlsr_n_u64_z (p0, z0, x0),
		 z0 = svlsr_z (p0, z0, x0))

/*
** lsr_x0_u64_z_untied:
**	mov	(z[0-9]+\.d), x0
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	lsr	z0\.d, p0/m, z0\.d, \1
** |
**	movprfx	z0\.d, p0/z, \1
**	lsrr	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_ZX (lsr_x0_u64_z_untied, svuint64_t, uint64_t,
		 z0 = svlsr_n_u64_z (p0, z1, x0),
		 z0 = svlsr_z (p0, z1, x0))

/*
** lsr_1_u64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	lsr	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (lsr_1_u64_z_tied1, svuint64_t,
		z0 = svlsr_n_u64_z (p0, z0, 1),
		z0 = svlsr_z (p0, z0, 1))

/*
** lsr_1_u64_z_untied:
**	movprfx	z0\.d, p0/z, z1\.d
**	lsr	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (lsr_1_u64_z_untied, svuint64_t,
		z0 = svlsr_n_u64_z (p0, z1, 1),
		z0 = svlsr_z (p0, z1, 1))

/*
** lsr_63_u64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	lsr	z0\.d, p0/m, z0\.d, #63
**	ret
*/
TEST_UNIFORM_Z (lsr_63_u64_z_tied1, svuint64_t,
		z0 = svlsr_n_u64_z (p0, z0, 63),
		z0 = svlsr_z (p0, z0, 63))

/*
** lsr_63_u64_z_untied:
**	movprfx	z0\.d, p0/z, z1\.d
**	lsr	z0\.d, p0/m, z0\.d, #63
**	ret
*/
TEST_UNIFORM_Z (lsr_63_u64_z_untied, svuint64_t,
		z0 = svlsr_n_u64_z (p0, z1, 63),
		z0 = svlsr_z (p0, z1, 63))

/*
** lsr_64_u64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	lsr	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (lsr_64_u64_z_tied1, svuint64_t,
		z0 = svlsr_n_u64_z (p0, z0, 64),
		z0 = svlsr_z (p0, z0, 64))

/*
** lsr_64_u64_z_untied:
**	movprfx	z0\.d, p0/z, z1\.d
**	lsr	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (lsr_64_u64_z_untied, svuint64_t,
		z0 = svlsr_n_u64_z (p0, z1, 64),
		z0 = svlsr_z (p0, z1, 64))

/*
** lsr_u64_x_tied1:
**	lsr	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (lsr_u64_x_tied1, svuint64_t,
		z0 = svlsr_u64_x (p0, z0, z1),
		z0 = svlsr_x (p0, z0, z1))

/*
** lsr_u64_x_tied2:
**	lsrr	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (lsr_u64_x_tied2, svuint64_t,
		z0 = svlsr_u64_x (p0, z1, z0),
		z0 = svlsr_x (p0, z1, z0))

/*
** lsr_u64_x_untied:
** (
**	movprfx	z0, z1
**	lsr	z0\.d, p0/m, z0\.d, z2\.d
** |
**	movprfx	z0, z2
**	lsrr	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_Z (lsr_u64_x_untied, svuint64_t,
		z0 = svlsr_u64_x (p0, z1, z2),
		z0 = svlsr_x (p0, z1, z2))

/*
** lsr_x0_u64_x_tied1:
**	mov	(z[0-9]+\.d), x0
**	lsr	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (lsr_x0_u64_x_tied1, svuint64_t, uint64_t,
		 z0 = svlsr_n_u64_x (p0, z0, x0),
		 z0 = svlsr_x (p0, z0, x0))

/*
** lsr_x0_u64_x_untied:
**	mov	z0\.d, x0
**	lsrr	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_ZX (lsr_x0_u64_x_untied, svuint64_t, uint64_t,
		 z0 = svlsr_n_u64_x (p0, z1, x0),
		 z0 = svlsr_x (p0, z1, x0))

/*
** lsr_1_u64_x_tied1:
**	lsr	z0\.d, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (lsr_1_u64_x_tied1, svuint64_t,
		z0 = svlsr_n_u64_x (p0, z0, 1),
		z0 = svlsr_x (p0, z0, 1))

/*
** lsr_1_u64_x_untied:
**	lsr	z0\.d, z1\.d, #1
**	ret
*/
TEST_UNIFORM_Z (lsr_1_u64_x_untied, svuint64_t,
		z0 = svlsr_n_u64_x (p0, z1, 1),
		z0 = svlsr_x (p0, z1, 1))

/*
** lsr_63_u64_x_tied1:
**	lsr	z0\.d, z0\.d, #63
**	ret
*/
TEST_UNIFORM_Z (lsr_63_u64_x_tied1, svuint64_t,
		z0 = svlsr_n_u64_x (p0, z0, 63),
		z0 = svlsr_x (p0, z0, 63))

/*
** lsr_63_u64_x_untied:
**	lsr	z0\.d, z1\.d, #63
**	ret
*/
TEST_UNIFORM_Z (lsr_63_u64_x_untied, svuint64_t,
		z0 = svlsr_n_u64_x (p0, z1, 63),
		z0 = svlsr_x (p0, z1, 63))

/*
** lsr_64_u64_x_tied1:
**	lsr	z0\.d, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (lsr_64_u64_x_tied1, svuint64_t,
		z0 = svlsr_n_u64_x (p0, z0, 64),
		z0 = svlsr_x (p0, z0, 64))

/*
** lsr_64_u64_x_untied:
**	lsr	z0\.d, z1\.d, #64
**	ret
*/
TEST_UNIFORM_Z (lsr_64_u64_x_untied, svuint64_t,
		z0 = svlsr_n_u64_x (p0, z1, 64),
		z0 = svlsr_x (p0, z1, 64))