(root)/
gcc-13.2.0/
gcc/
testsuite/
gcc.target/
aarch64/
sve2/
acle/
asm/
sqadd_u32.c
       1  /* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
       2  
       3  #include "test_sve_acle.h"
       4  
/* Merging (_m) predication with a vector second operand.  Each "**"
   comment is an assembly regex template that the check-function-bodies
   directive (top of file) matches against the body of the function the
   TEST_* macro expands to.  */

/*
** sqadd_u32_m_tied1:
**	usqadd	z0\.s, p0/m, z0\.s, z4\.s
**	ret
*/
TEST_DUAL_Z (sqadd_u32_m_tied1, svuint32_t, svint32_t,
	     z0 = svsqadd_u32_m (p0, z0, z4),
	     z0 = svsqadd_m (p0, z0, z4))

/*
** sqadd_u32_m_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z4
**	usqadd	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_DUAL_Z_REV (sqadd_u32_m_tied2, svuint32_t, svint32_t,
		 z0_res = svsqadd_u32_m (p0, z4, z0),
		 z0_res = svsqadd_m (p0, z4, z0))

/*
** sqadd_u32_m_untied:
**	movprfx	z0, z1
**	usqadd	z0\.s, p0/m, z0\.s, z4\.s
**	ret
*/
TEST_DUAL_Z (sqadd_u32_m_untied, svuint32_t, svint32_t,
	     z0 = svsqadd_u32_m (p0, z1, z4),
	     z0 = svsqadd_m (p0, z1, z4))
      34  
/* Merging forms whose second operand is a scalar (w0) or an immediate:
   the expected code first broadcasts the value to a vector register.  */

/*
** sqadd_w0_u32_m_tied1:
**	mov	(z[0-9]+\.s), w0
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (sqadd_w0_u32_m_tied1, svuint32_t, int32_t,
		 z0 = svsqadd_n_u32_m (p0, z0, x0),
		 z0 = svsqadd_m (p0, z0, x0))

/*
** sqadd_w0_u32_m_untied:
**	mov	(z[0-9]+\.s), w0
**	movprfx	z0, z1
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (sqadd_w0_u32_m_untied, svuint32_t, int32_t,
		 z0 = svsqadd_n_u32_m (p0, z1, x0),
		 z0 = svsqadd_m (p0, z1, x0))

/*
** sqadd_1_u32_m_tied1:
**	mov	(z[0-9]+\.s), #1
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_1_u32_m_tied1, svuint32_t,
		z0 = svsqadd_n_u32_m (p0, z0, 1),
		z0 = svsqadd_m (p0, z0, 1))
      65  
      66  /*
      67  ** sqadd_1_u32_m_untied:: { xfail *-*-*}
      68  **	mov	(z[0-9]+\.s), #1
      69  **	movprfx	z0, z1
      70  **	usqadd	z0\.s, p0/m, z0\.s, \1
      71  **	ret
      72  */
      73  TEST_UNIFORM_Z (sqadd_1_u32_m_untied, svuint32_t,
      74  		z0 = svsqadd_n_u32_m (p0, z1, 1),
      75  		z0 = svsqadd_m (p0, z1, 1))
      76  
/* Further immediate cases for the merging form.  Positive constants are
   broadcast as .s elements; -1 is materialized as a byte splat (.b) and
   then used as a .s operand.  */

/*
** sqadd_127_u32_m:
**	mov	(z[0-9]+\.s), #127
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_127_u32_m, svuint32_t,
		z0 = svsqadd_n_u32_m (p0, z0, 127),
		z0 = svsqadd_m (p0, z0, 127))

/*
** sqadd_128_u32_m:
**	mov	(z[0-9]+\.s), #128
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_128_u32_m, svuint32_t,
		z0 = svsqadd_n_u32_m (p0, z0, 128),
		z0 = svsqadd_m (p0, z0, 128))

/*
** sqadd_255_u32_m:
**	mov	(z[0-9]+\.s), #255
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_255_u32_m, svuint32_t,
		z0 = svsqadd_n_u32_m (p0, z0, 255),
		z0 = svsqadd_m (p0, z0, 255))

/*
** sqadd_m1_u32_m:
**	mov	(z[0-9]+)\.b, #-1
**	usqadd	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_UNIFORM_Z (sqadd_m1_u32_m, svuint32_t,
		z0 = svsqadd_n_u32_m (p0, z0, -1),
		z0 = svsqadd_m (p0, z0, -1))

/*
** sqadd_m127_u32_m:
**	mov	(z[0-9]+\.s), #-127
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_m127_u32_m, svuint32_t,
		z0 = svsqadd_n_u32_m (p0, z0, -127),
		z0 = svsqadd_m (p0, z0, -127))

/*
** sqadd_m128_u32_m:
**	mov	(z[0-9]+\.s), #-128
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_m128_u32_m, svuint32_t,
		z0 = svsqadd_n_u32_m (p0, z0, -128),
		z0 = svsqadd_m (p0, z0, -128))
     136  
/* Zeroing (_z) predication: a "movprfx z0.s, p0/z, ..." supplies the
   zeroing of inactive lanes before the predicated USQADD.  */

/*
** sqadd_u32_z_tied1:
**	movprfx	z0\.s, p0/z, z0\.s
**	usqadd	z0\.s, p0/m, z0\.s, z4\.s
**	ret
*/
TEST_DUAL_Z (sqadd_u32_z_tied1, svuint32_t, svint32_t,
	     z0 = svsqadd_u32_z (p0, z0, z4),
	     z0 = svsqadd_z (p0, z0, z4))

/*
** sqadd_u32_z_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0\.s, p0/z, z4\.s
**	usqadd	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_DUAL_Z_REV (sqadd_u32_z_tied2, svuint32_t, svint32_t,
		 z0_res = svsqadd_u32_z (p0, z4, z0),
		 z0_res = svsqadd_z (p0, z4, z0))

/*
** sqadd_u32_z_untied:
**	movprfx	z0\.s, p0/z, z1\.s
**	usqadd	z0\.s, p0/m, z0\.s, z4\.s
**	ret
*/
TEST_DUAL_Z (sqadd_u32_z_untied, svuint32_t, svint32_t,
	     z0 = svsqadd_u32_z (p0, z1, z4),
	     z0 = svsqadd_z (p0, z1, z4))

/*
** sqadd_w0_u32_z_tied1:
**	mov	(z[0-9]+\.s), w0
**	movprfx	z0\.s, p0/z, z0\.s
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (sqadd_w0_u32_z_tied1, svuint32_t, int32_t,
		 z0 = svsqadd_n_u32_z (p0, z0, x0),
		 z0 = svsqadd_z (p0, z0, x0))
     178  
     179  /*
     180  ** sqadd_w0_u32_z_untied:: { xfail *-*-*}
     181  **	mov	(z[0-9]+\.s), w0
     182  **	movprfx	z0\.s, p0/z, z1\.s
     183  **	usqadd	z0\.s, p0/m, z0\.s, \1
     184  **	ret
     185  */
     186  TEST_UNIFORM_ZX (sqadd_w0_u32_z_untied, svuint32_t, int32_t,
     187  		 z0 = svsqadd_n_u32_z (p0, z1, x0),
     188  		 z0 = svsqadd_z (p0, z1, x0))
     189  
/* Zeroing form, immediate 1, destination tied to the first operand.  */
/*
** sqadd_1_u32_z_tied1:
**	mov	(z[0-9]+\.s), #1
**	movprfx	z0\.s, p0/z, z0\.s
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_1_u32_z_tied1, svuint32_t,
		z0 = svsqadd_n_u32_z (p0, z0, 1),
		z0 = svsqadd_z (p0, z0, 1))
     200  
     201  /*
     202  ** sqadd_1_u32_z_untied:: { xfail *-*-*}
     203  **	mov	(z[0-9]+\.s), #1
     204  **	movprfx	z0\.s, p0/z, z1\.s
     205  **	usqadd	z0\.s, p0/m, z0\.s, \1
     206  **	ret
     207  */
     208  TEST_UNIFORM_Z (sqadd_1_u32_z_untied, svuint32_t,
     209  		z0 = svsqadd_n_u32_z (p0, z1, 1),
     210  		z0 = svsqadd_z (p0, z1, 1))
     211  
/* Further immediate cases for the zeroing form; same broadcast patterns
   as the merging (_m) immediate cases above.  */

/*
** sqadd_127_u32_z:
**	mov	(z[0-9]+\.s), #127
**	movprfx	z0\.s, p0/z, z0\.s
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_127_u32_z, svuint32_t,
		z0 = svsqadd_n_u32_z (p0, z0, 127),
		z0 = svsqadd_z (p0, z0, 127))

/*
** sqadd_128_u32_z:
**	mov	(z[0-9]+\.s), #128
**	movprfx	z0\.s, p0/z, z0\.s
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_128_u32_z, svuint32_t,
		z0 = svsqadd_n_u32_z (p0, z0, 128),
		z0 = svsqadd_z (p0, z0, 128))

/*
** sqadd_255_u32_z:
**	mov	(z[0-9]+\.s), #255
**	movprfx	z0\.s, p0/z, z0\.s
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_255_u32_z, svuint32_t,
		z0 = svsqadd_n_u32_z (p0, z0, 255),
		z0 = svsqadd_z (p0, z0, 255))

/*
** sqadd_m1_u32_z:
**	mov	(z[0-9]+)\.b, #-1
**	movprfx	z0\.s, p0/z, z0\.s
**	usqadd	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_UNIFORM_Z (sqadd_m1_u32_z, svuint32_t,
		z0 = svsqadd_n_u32_z (p0, z0, -1),
		z0 = svsqadd_z (p0, z0, -1))

/*
** sqadd_m127_u32_z:
**	mov	(z[0-9]+\.s), #-127
**	movprfx	z0\.s, p0/z, z0\.s
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_m127_u32_z, svuint32_t,
		z0 = svsqadd_n_u32_z (p0, z0, -127),
		z0 = svsqadd_z (p0, z0, -127))

/*
** sqadd_m128_u32_z:
**	mov	(z[0-9]+\.s), #-128
**	movprfx	z0\.s, p0/z, z0\.s
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_m128_u32_z, svuint32_t,
		z0 = svsqadd_n_u32_z (p0, z0, -128),
		z0 = svsqadd_z (p0, z0, -128))
     277  
/* The _x ("don't care" about inactive lanes) forms with vector and
   scalar (w0) operands: no movprfx-with-/z is needed, so the tied case
   is a single instruction.  */

/*
** sqadd_u32_x_tied1:
**	usqadd	z0\.s, p0/m, z0\.s, z4\.s
**	ret
*/
TEST_DUAL_Z (sqadd_u32_x_tied1, svuint32_t, svint32_t,
	     z0 = svsqadd_u32_x (p0, z0, z4),
	     z0 = svsqadd_x (p0, z0, z4))

/*
** sqadd_u32_x_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z4
**	usqadd	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_DUAL_Z_REV (sqadd_u32_x_tied2, svuint32_t, svint32_t,
		 z0_res = svsqadd_u32_x (p0, z4, z0),
		 z0_res = svsqadd_x (p0, z4, z0))

/*
** sqadd_u32_x_untied:
**	movprfx	z0, z1
**	usqadd	z0\.s, p0/m, z0\.s, z4\.s
**	ret
*/
TEST_DUAL_Z (sqadd_u32_x_untied, svuint32_t, svint32_t,
	     z0 = svsqadd_u32_x (p0, z1, z4),
	     z0 = svsqadd_x (p0, z1, z4))

/*
** sqadd_w0_u32_x_tied1:
**	mov	(z[0-9]+\.s), w0
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (sqadd_w0_u32_x_tied1, svuint32_t, int32_t,
		 z0 = svsqadd_n_u32_x (p0, z0, x0),
		 z0 = svsqadd_x (p0, z0, x0))

/*
** sqadd_w0_u32_x_untied:
**	mov	(z[0-9]+\.s), w0
**	movprfx	z0, z1
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (sqadd_w0_u32_x_untied, svuint32_t, int32_t,
		 z0 = svsqadd_n_u32_x (p0, z1, x0),
		 z0 = svsqadd_x (p0, z1, x0))
     328  
/* _x forms with immediates: the small positive constants here (1..255)
   are expected to use UQADD's unpredicated immediate form directly,
   while negative constants are materialized in a register and added
   with the predicated USQADD.  */

/*
** sqadd_1_u32_x_tied1:
**	uqadd	z0\.s, z0\.s, #1
**	ret
*/
TEST_UNIFORM_Z (sqadd_1_u32_x_tied1, svuint32_t,
		z0 = svsqadd_n_u32_x (p0, z0, 1),
		z0 = svsqadd_x (p0, z0, 1))

/*
** sqadd_1_u32_x_untied:
**	movprfx	z0, z1
**	uqadd	z0\.s, z0\.s, #1
**	ret
*/
TEST_UNIFORM_Z (sqadd_1_u32_x_untied, svuint32_t,
		z0 = svsqadd_n_u32_x (p0, z1, 1),
		z0 = svsqadd_x (p0, z1, 1))

/*
** sqadd_127_u32_x:
**	uqadd	z0\.s, z0\.s, #127
**	ret
*/
TEST_UNIFORM_Z (sqadd_127_u32_x, svuint32_t,
		z0 = svsqadd_n_u32_x (p0, z0, 127),
		z0 = svsqadd_x (p0, z0, 127))

/*
** sqadd_128_u32_x:
**	uqadd	z0\.s, z0\.s, #128
**	ret
*/
TEST_UNIFORM_Z (sqadd_128_u32_x, svuint32_t,
		z0 = svsqadd_n_u32_x (p0, z0, 128),
		z0 = svsqadd_x (p0, z0, 128))

/*
** sqadd_255_u32_x:
**	uqadd	z0\.s, z0\.s, #255
**	ret
*/
TEST_UNIFORM_Z (sqadd_255_u32_x, svuint32_t,
		z0 = svsqadd_n_u32_x (p0, z0, 255),
		z0 = svsqadd_x (p0, z0, 255))

/*
** sqadd_m1_u32_x:
**	mov	(z[0-9]+)\.b, #-1
**	usqadd	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_UNIFORM_Z (sqadd_m1_u32_x, svuint32_t,
		z0 = svsqadd_n_u32_x (p0, z0, -1),
		z0 = svsqadd_x (p0, z0, -1))

/*
** sqadd_m127_u32_x:
**	mov	(z[0-9]+\.s), #-127
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_m127_u32_x, svuint32_t,
		z0 = svsqadd_n_u32_x (p0, z0, -127),
		z0 = svsqadd_x (p0, z0, -127))

/*
** sqadd_m128_u32_x:
**	mov	(z[0-9]+\.s), #-128
**	usqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sqadd_m128_u32_x, svuint32_t,
		z0 = svsqadd_n_u32_x (p0, z0, -128),
		z0 = svsqadd_x (p0, z0, -128))