(root)/
gcc-13.2.0/
gcc/
testsuite/
gcc.target/
aarch64/
sve2/
acle/
asm/
hsub_u8.c
       1  /* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
       2  
       3  #include "test_sve_acle.h"
       4  
/* _m (merging) form, destination tied to the first vector operand:
   expect a single predicated UHSUB with no register moves.  */
/*
** hsub_u8_m_tied1:
**	uhsub	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (hsub_u8_m_tied1, svuint8_t,
		z0 = svhsub_u8_m (p0, z0, z1),
		z0 = svhsub_m (p0, z0, z1))
      13  
/* _m form, destination tied to the second operand: expect the second
   operand to be saved in a temporary (MOV) and the result prefixed
   from z1 (MOVPRFX) before the destructive UHSUB.  */
/*
** hsub_u8_m_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	uhsub	z0\.b, p0/m, z0\.b, \1\.b
**	ret
*/
TEST_UNIFORM_Z (hsub_u8_m_tied2, svuint8_t,
		z0 = svhsub_u8_m (p0, z1, z0),
		z0 = svhsub_m (p0, z1, z0))
      24  
/* _m form, untied destination: expect MOVPRFX from z1 followed by the
   predicated UHSUB with z2.  */
/*
** hsub_u8_m_untied:
**	movprfx	z0, z1
**	uhsub	z0\.b, p0/m, z0\.b, z2\.b
**	ret
*/
TEST_UNIFORM_Z (hsub_u8_m_untied, svuint8_t,
		z0 = svhsub_u8_m (p0, z1, z2),
		z0 = svhsub_m (p0, z1, z2))
      34  
/* Scalar (_n) operand from w0, _m form, tied destination: the scalar is
   broadcast with MOV, then a single predicated UHSUB.  */
/*
** hsub_w0_u8_m_tied1:
**	mov	(z[0-9]+\.b), w0
**	uhsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (hsub_w0_u8_m_tied1, svuint8_t, uint8_t,
		 z0 = svhsub_n_u8_m (p0, z0, x0),
		 z0 = svhsub_m (p0, z0, x0))
      44  
      45  /*
      46  ** hsub_w0_u8_m_untied:: { xfail *-*-*}
      47  **	mov	(z[0-9]+\.b), w0
      48  **	movprfx	z0, z1
      49  **	uhsub	z0\.b, p0/m, z0\.b, \1
      50  **	ret
      51  */
      52  TEST_UNIFORM_ZX (hsub_w0_u8_m_untied, svuint8_t, uint8_t,
      53  		 z0 = svhsub_n_u8_m (p0, z1, x0),
      54  		 z0 = svhsub_m (p0, z1, x0))
      55  
/* Immediate (_n) operand 11, _m form, tied destination: the constant is
   materialized into a vector register with MOV, then UHSUB.  */
/*
** hsub_11_u8_m_tied1:
**	mov	(z[0-9]+\.b), #11
**	uhsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (hsub_11_u8_m_tied1, svuint8_t,
		z0 = svhsub_n_u8_m (p0, z0, 11),
		z0 = svhsub_m (p0, z0, 11))
      65  
      66  /*
      67  ** hsub_11_u8_m_untied:: { xfail *-*-*}
      68  **	mov	(z[0-9]+\.b), #11
      69  **	movprfx	z0, z1
      70  **	uhsub	z0\.b, p0/m, z0\.b, \1
      71  **	ret
      72  */
      73  TEST_UNIFORM_Z (hsub_11_u8_m_untied, svuint8_t,
      74  		z0 = svhsub_n_u8_m (p0, z1, 11),
      75  		z0 = svhsub_m (p0, z1, 11))
      76  
/* _z (zeroing) form, tied to the first operand: expect a zeroing
   MOVPRFX of the destination, then the predicated UHSUB.  */
/*
** hsub_u8_z_tied1:
**	movprfx	z0\.b, p0/z, z0\.b
**	uhsub	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (hsub_u8_z_tied1, svuint8_t,
		z0 = svhsub_u8_z (p0, z0, z1),
		z0 = svhsub_z (p0, z0, z1))
      86  
/* _z form, tied to the second operand: expect the reversed form UHSUBR
   so no extra register move is needed.  */
/*
** hsub_u8_z_tied2:
**	movprfx	z0\.b, p0/z, z0\.b
**	uhsubr	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (hsub_u8_z_tied2, svuint8_t,
		z0 = svhsub_u8_z (p0, z1, z0),
		z0 = svhsub_z (p0, z1, z0))
      96  
/* _z form, untied: either operand order is acceptable — prefix from z1
   and use UHSUB, or prefix from z2 and use the reversed UHSUBR.  */
/*
** hsub_u8_z_untied:
** (
**	movprfx	z0\.b, p0/z, z1\.b
**	uhsub	z0\.b, p0/m, z0\.b, z2\.b
** |
**	movprfx	z0\.b, p0/z, z2\.b
**	uhsubr	z0\.b, p0/m, z0\.b, z1\.b
** )
**	ret
*/
TEST_UNIFORM_Z (hsub_u8_z_untied, svuint8_t,
		z0 = svhsub_u8_z (p0, z1, z2),
		z0 = svhsub_z (p0, z1, z2))
     111  
/* Scalar (_n) operand from w0, _z form, tied destination: broadcast the
   scalar, zero-prefix the destination, then UHSUB.  */
/*
** hsub_w0_u8_z_tied1:
**	mov	(z[0-9]+\.b), w0
**	movprfx	z0\.b, p0/z, z0\.b
**	uhsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (hsub_w0_u8_z_tied1, svuint8_t, uint8_t,
		 z0 = svhsub_n_u8_z (p0, z0, x0),
		 z0 = svhsub_z (p0, z0, x0))
     122  
/* Scalar (_n) operand from w0, _z form, untied: after the broadcast,
   either prefix from z1 and UHSUB, or prefix from the broadcast
   register and use the reversed UHSUBR.  */
/*
** hsub_w0_u8_z_untied:
**	mov	(z[0-9]+\.b), w0
** (
**	movprfx	z0\.b, p0/z, z1\.b
**	uhsub	z0\.b, p0/m, z0\.b, \1
** |
**	movprfx	z0\.b, p0/z, \1
**	uhsubr	z0\.b, p0/m, z0\.b, z1\.b
** )
**	ret
*/
TEST_UNIFORM_ZX (hsub_w0_u8_z_untied, svuint8_t, uint8_t,
		 z0 = svhsub_n_u8_z (p0, z1, x0),
		 z0 = svhsub_z (p0, z1, x0))
     138  
/* Immediate (_n) operand 11, _z form, tied destination: materialize the
   constant, zero-prefix the destination, then UHSUB.  */
/*
** hsub_11_u8_z_tied1:
**	mov	(z[0-9]+\.b), #11
**	movprfx	z0\.b, p0/z, z0\.b
**	uhsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (hsub_11_u8_z_tied1, svuint8_t,
		z0 = svhsub_n_u8_z (p0, z0, 11),
		z0 = svhsub_z (p0, z0, 11))
     149  
/* Immediate (_n) operand 11, _z form, untied: either prefix from z1 and
   UHSUB, or prefix from the constant register and use UHSUBR.  */
/*
** hsub_11_u8_z_untied:
**	mov	(z[0-9]+\.b), #11
** (
**	movprfx	z0\.b, p0/z, z1\.b
**	uhsub	z0\.b, p0/m, z0\.b, \1
** |
**	movprfx	z0\.b, p0/z, \1
**	uhsubr	z0\.b, p0/m, z0\.b, z1\.b
** )
**	ret
*/
TEST_UNIFORM_Z (hsub_11_u8_z_untied, svuint8_t,
		z0 = svhsub_n_u8_z (p0, z1, 11),
		z0 = svhsub_z (p0, z1, 11))
     165  
/* _x (don't-care predication) form, tied to the first operand: a bare
   predicated UHSUB, no prefix needed.  */
/*
** hsub_u8_x_tied1:
**	uhsub	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (hsub_u8_x_tied1, svuint8_t,
		z0 = svhsub_u8_x (p0, z0, z1),
		z0 = svhsub_x (p0, z0, z1))
     174  
/* _x form, tied to the second operand: the reversed UHSUBR avoids any
   register move.  */
/*
** hsub_u8_x_tied2:
**	uhsubr	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (hsub_u8_x_tied2, svuint8_t,
		z0 = svhsub_u8_x (p0, z1, z0),
		z0 = svhsub_x (p0, z1, z0))
     183  
/* _x form, untied: either prefix from z1 and UHSUB, or prefix from z2
   and use the reversed UHSUBR.  */
/*
** hsub_u8_x_untied:
** (
**	movprfx	z0, z1
**	uhsub	z0\.b, p0/m, z0\.b, z2\.b
** |
**	movprfx	z0, z2
**	uhsubr	z0\.b, p0/m, z0\.b, z1\.b
** )
**	ret
*/
TEST_UNIFORM_Z (hsub_u8_x_untied, svuint8_t,
		z0 = svhsub_u8_x (p0, z1, z2),
		z0 = svhsub_x (p0, z1, z2))
     198  
/* Scalar (_n) operand from w0, _x form, tied destination: broadcast the
   scalar, then a single UHSUB.  */
/*
** hsub_w0_u8_x_tied1:
**	mov	(z[0-9]+\.b), w0
**	uhsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (hsub_w0_u8_x_tied1, svuint8_t, uint8_t,
		 z0 = svhsub_n_u8_x (p0, z0, x0),
		 z0 = svhsub_x (p0, z0, x0))
     208  
/* Scalar (_n) operand from w0, _x form, untied: broadcast directly into
   the destination and use the reversed UHSUBR — no temporary needed.  */
/*
** hsub_w0_u8_x_untied:
**	mov	z0\.b, w0
**	uhsubr	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_ZX (hsub_w0_u8_x_untied, svuint8_t, uint8_t,
		 z0 = svhsub_n_u8_x (p0, z1, x0),
		 z0 = svhsub_x (p0, z1, x0))
     218  
/* Immediate (_n) operand 11, _x form, tied destination: materialize the
   constant, then a single UHSUB.  */
/*
** hsub_11_u8_x_tied1:
**	mov	(z[0-9]+\.b), #11
**	uhsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (hsub_11_u8_x_tied1, svuint8_t,
		z0 = svhsub_n_u8_x (p0, z0, 11),
		z0 = svhsub_x (p0, z0, 11))
     228  
/* Immediate (_n) operand 11, _x form, untied: materialize the constant
   directly into the destination and use the reversed UHSUBR.  */
/*
** hsub_11_u8_x_untied:
**	mov	z0\.b, #11
**	uhsubr	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (hsub_11_u8_x_untied, svuint8_t,
		z0 = svhsub_n_u8_x (p0, z1, 11),
		z0 = svhsub_x (p0, z1, 11))