/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

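/* Each TEST_UNIFORM_Z/TEST_UNIFORM_ZX invocation below defines one function
   whose compiled body check-function-bodies matches against the regular
   expressions in the preceding "** name:" comment.  As a rough sketch
   (the exact PROTO/INVOKE plumbing lives in test_sve_acle.h), the first
   test expands to something like:

     svuint8_t qsubr_u8_m_tied1 (svuint8_t z0, svuint8_t z1, svuint8_t z2,
				 svuint8_t z3, svbool_t p0)
     {
       z0 = svqsubr_u8_m (p0, z0, z1);	// or the overloaded svqsubr_m
       return z0;
     }

   "tied1"/"tied2" tests pass the destination as the first or second source
   operand; "untied" tests use distinct registers throughout.  */
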
/*
** qsubr_u8_m_tied1:
**	uqsubr	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_u8_m_tied1, svuint8_t,
		z0 = svqsubr_u8_m (p0, z0, z1),
		z0 = svqsubr_m (p0, z0, z1))

/*
** qsubr_u8_m_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	uqsubr	z0\.b, p0/m, z0\.b, \1\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_u8_m_tied2, svuint8_t,
		z0 = svqsubr_u8_m (p0, z1, z0),
		z0 = svqsubr_m (p0, z1, z0))

/*
** qsubr_u8_m_untied:
**	movprfx	z0, z1
**	uqsubr	z0\.b, p0/m, z0\.b, z2\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_u8_m_untied, svuint8_t,
		z0 = svqsubr_u8_m (p0, z1, z2),
		z0 = svqsubr_m (p0, z1, z2))

/*
** qsubr_w0_u8_m_tied1:
**	mov	(z[0-9]+\.b), w0
**	uqsubr	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (qsubr_w0_u8_m_tied1, svuint8_t, uint8_t,
		 z0 = svqsubr_n_u8_m (p0, z0, x0),
		 z0 = svqsubr_m (p0, z0, x0))

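/* The { xfail *-*-* } annotations below mark known missed optimizations:
   GCC does not currently produce this ideal mov + movprfx sequence for the
   untied scalar and immediate forms, so these tests are expected to fail
   on all targets until code generation improves.  */
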
/*
** qsubr_w0_u8_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.b), w0
**	movprfx	z0, z1
**	uqsubr	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (qsubr_w0_u8_m_untied, svuint8_t, uint8_t,
		 z0 = svqsubr_n_u8_m (p0, z1, x0),
		 z0 = svqsubr_m (p0, z1, x0))

/*
** qsubr_1_u8_m_tied1:
**	mov	(z[0-9]+\.b), #1
**	uqsubr	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsubr_1_u8_m_tied1, svuint8_t,
		z0 = svqsubr_n_u8_m (p0, z0, 1),
		z0 = svqsubr_m (p0, z0, 1))

/*
** qsubr_1_u8_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.b), #1
**	movprfx	z0, z1
**	uqsubr	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsubr_1_u8_m_untied, svuint8_t,
		z0 = svqsubr_n_u8_m (p0, z1, 1),
		z0 = svqsubr_m (p0, z1, 1))

/*
** qsubr_m1_u8_m:
**	mov	(z[0-9]+\.b), #-1
**	uqsubr	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsubr_m1_u8_m, svuint8_t,
		z0 = svqsubr_n_u8_m (p0, z0, -1),
		z0 = svqsubr_m (p0, z0, -1))

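/* Zeroing (_z) forms: inactive lanes of the result must be zero, so the
   destination is first prefixed with "movprfx z0.b, p0/z, ..." and the
   saturating subtraction then merges into it.  */
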
/*
** qsubr_u8_z_tied1:
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsubr	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_u8_z_tied1, svuint8_t,
		z0 = svqsubr_u8_z (p0, z0, z1),
		z0 = svqsubr_z (p0, z0, z1))

/*
** qsubr_u8_z_tied2:
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsub	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_u8_z_tied2, svuint8_t,
		z0 = svqsubr_u8_z (p0, z1, z0),
		z0 = svqsubr_z (p0, z1, z0))

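/* With untied operands the compiler may prefix either source: prefix the
   first operand and use the reversed UQSUBR, or prefix the second and use
   plain UQSUB.  The ( ... | ... ) alternations below accept both.  */
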
/*
** qsubr_u8_z_untied:
** (
**	movprfx	z0\.b, p0/z, z1\.b
**	uqsubr	z0\.b, p0/m, z0\.b, z2\.b
** |
**	movprfx	z0\.b, p0/z, z2\.b
**	uqsub	z0\.b, p0/m, z0\.b, z1\.b
** )
**	ret
*/
TEST_UNIFORM_Z (qsubr_u8_z_untied, svuint8_t,
		z0 = svqsubr_u8_z (p0, z1, z2),
		z0 = svqsubr_z (p0, z1, z2))

/*
** qsubr_w0_u8_z_tied1:
**	mov	(z[0-9]+\.b), w0
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsubr	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (qsubr_w0_u8_z_tied1, svuint8_t, uint8_t,
		 z0 = svqsubr_n_u8_z (p0, z0, x0),
		 z0 = svqsubr_z (p0, z0, x0))

/*
** qsubr_w0_u8_z_untied:
**	mov	(z[0-9]+\.b), w0
** (
**	movprfx	z0\.b, p0/z, z1\.b
**	uqsubr	z0\.b, p0/m, z0\.b, \1
** |
**	movprfx	z0\.b, p0/z, \1
**	uqsub	z0\.b, p0/m, z0\.b, z1\.b
** )
**	ret
*/
TEST_UNIFORM_ZX (qsubr_w0_u8_z_untied, svuint8_t, uint8_t,
		 z0 = svqsubr_n_u8_z (p0, z1, x0),
		 z0 = svqsubr_z (p0, z1, x0))

/*
** qsubr_1_u8_z_tied1:
**	mov	(z[0-9]+\.b), #1
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsubr	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsubr_1_u8_z_tied1, svuint8_t,
		z0 = svqsubr_n_u8_z (p0, z0, 1),
		z0 = svqsubr_z (p0, z0, 1))

/*
** qsubr_1_u8_z_untied:
**	mov	(z[0-9]+\.b), #1
** (
**	movprfx	z0\.b, p0/z, z1\.b
**	uqsubr	z0\.b, p0/m, z0\.b, \1
** |
**	movprfx	z0\.b, p0/z, \1
**	uqsub	z0\.b, p0/m, z0\.b, z1\.b
** )
**	ret
*/
TEST_UNIFORM_Z (qsubr_1_u8_z_untied, svuint8_t,
		z0 = svqsubr_n_u8_z (p0, z1, 1),
		z0 = svqsubr_z (p0, z1, 1))

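/* Don't-care (_x) forms: inactive lanes may hold any value, so the
   reversed subtraction maps directly onto the unpredicated UQSUB with
   the operands swapped, and no movprfx is needed.  */
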
/*
** qsubr_u8_x_tied1:
**	uqsub	z0\.b, z1\.b, z0\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_u8_x_tied1, svuint8_t,
		z0 = svqsubr_u8_x (p0, z0, z1),
		z0 = svqsubr_x (p0, z0, z1))

/*
** qsubr_u8_x_tied2:
**	uqsub	z0\.b, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_u8_x_tied2, svuint8_t,
		z0 = svqsubr_u8_x (p0, z1, z0),
		z0 = svqsubr_x (p0, z1, z0))

/*
** qsubr_u8_x_untied:
**	uqsub	z0\.b, z2\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_u8_x_untied, svuint8_t,
		z0 = svqsubr_u8_x (p0, z1, z2),
		z0 = svqsubr_x (p0, z1, z2))

/*
** qsubr_w0_u8_x_tied1:
**	mov	(z[0-9]+\.b), w0
**	uqsub	z0\.b, \1, z0\.b
**	ret
*/
TEST_UNIFORM_ZX (qsubr_w0_u8_x_tied1, svuint8_t, uint8_t,
		 z0 = svqsubr_n_u8_x (p0, z0, x0),
		 z0 = svqsubr_x (p0, z0, x0))

/*
** qsubr_w0_u8_x_untied:
**	mov	(z[0-9]+\.b), w0
**	uqsub	z0\.b, \1, z1\.b
**	ret
*/
TEST_UNIFORM_ZX (qsubr_w0_u8_x_untied, svuint8_t, uint8_t,
		 z0 = svqsubr_n_u8_x (p0, z1, x0),
		 z0 = svqsubr_x (p0, z1, x0))

/*
** qsubr_1_u8_x_tied1:
**	mov	(z[0-9]+\.b), #1
**	uqsub	z0\.b, \1, z0\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_1_u8_x_tied1, svuint8_t,
		z0 = svqsubr_n_u8_x (p0, z0, 1),
		z0 = svqsubr_x (p0, z0, 1))

/*
** qsubr_1_u8_x_untied:
**	mov	(z[0-9]+\.b), #1
**	uqsub	z0\.b, \1, z1\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_1_u8_x_untied, svuint8_t,
		z0 = svqsubr_n_u8_x (p0, z1, 1),
		z0 = svqsubr_x (p0, z1, 1))

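/* Immediates are materialized as signed bytes: 128 is loaded as #-128 and
   255 as #-1, so the 255 and -1 tests below expect identical code.  */
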
/*
** qsubr_127_u8_x:
**	mov	(z[0-9]+\.b), #127
**	uqsub	z0\.b, \1, z0\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_127_u8_x, svuint8_t,
		z0 = svqsubr_n_u8_x (p0, z0, 127),
		z0 = svqsubr_x (p0, z0, 127))

/*
** qsubr_128_u8_x:
**	mov	(z[0-9]+\.b), #-128
**	uqsub	z0\.b, \1, z0\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_128_u8_x, svuint8_t,
		z0 = svqsubr_n_u8_x (p0, z0, 128),
		z0 = svqsubr_x (p0, z0, 128))

/*
** qsubr_255_u8_x:
**	mov	(z[0-9]+\.b), #-1
**	uqsub	z0\.b, \1, z0\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_255_u8_x, svuint8_t,
		z0 = svqsubr_n_u8_x (p0, z0, 255),
		z0 = svqsubr_x (p0, z0, 255))

/*
** qsubr_m1_u8_x:
**	mov	(z[0-9]+\.b), #-1
**	uqsub	z0\.b, \1, z0\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_m1_u8_x, svuint8_t,
		z0 = svqsubr_n_u8_x (p0, z0, -1),
		z0 = svqsubr_x (p0, z0, -1))

/*
** qsubr_m127_u8_x:
**	mov	(z[0-9]+\.b), #-127
**	uqsub	z0\.b, \1, z0\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_m127_u8_x, svuint8_t,
		z0 = svqsubr_n_u8_x (p0, z0, -127),
		z0 = svqsubr_x (p0, z0, -127))

/*
** qsubr_m128_u8_x:
**	mov	(z[0-9]+\.b), #-128
**	uqsub	z0\.b, \1, z0\.b
**	ret
*/
TEST_UNIFORM_Z (qsubr_m128_u8_x, svuint8_t,
		z0 = svqsubr_n_u8_x (p0, z0, -128),
		z0 = svqsubr_x (p0, z0, -128))