/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
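
/* Unpredicated forms.  svqadd on unsigned elements performs a saturating
   add: results that would overflow clamp to the maximum representable
   value instead of wrapping.  Each test checks both the type-suffixed
   intrinsic (svqadd_u32) and the overloaded form (svqadd); the (a|b)
   alternations in the expected bodies accept either operand order,
   since the operation is commutative.  */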

/*
** qadd_u32_tied1:
**	uqadd	z0\.s, (z0\.s, z1\.s|z1\.s, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_tied1, svuint32_t,
		z0 = svqadd_u32 (z0, z1),
		z0 = svqadd (z0, z1))

/*
** qadd_u32_tied2:
**	uqadd	z0\.s, (z0\.s, z1\.s|z1\.s, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_tied2, svuint32_t,
		z0 = svqadd_u32 (z1, z0),
		z0 = svqadd (z1, z0))

/*
** qadd_u32_untied:
**	uqadd	z0\.s, (z1\.s, z2\.s|z2\.s, z1\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_untied, svuint32_t,
		z0 = svqadd_u32 (z1, z2),
		z0 = svqadd (z1, z2))
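
/* The _n forms take a scalar second operand.  There is no UQADD encoding
   that combines a vector with a general register directly, so the scalar
   in w0 is expected to be broadcast to a vector register with MOV first.  */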

/*
** qadd_w0_u32_tied1:
**	mov	(z[0-9]+\.s), w0
**	uqadd	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_u32_tied1, svuint32_t, uint32_t,
		 z0 = svqadd_n_u32 (z0, x0),
		 z0 = svqadd (z0, x0))

/*
** qadd_w0_u32_untied:
**	mov	(z[0-9]+\.s), w0
**	uqadd	z0\.s, (z1\.s, \1|\1, z1\.s)
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_u32_untied, svuint32_t, uint32_t,
		 z0 = svqadd_n_u32 (z1, x0),
		 z0 = svqadd (z1, x0))

/*
** qadd_1_u32_tied1:
**	uqadd	z0\.s, z0\.s, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_u32_tied1, svuint32_t,
		z0 = svqadd_n_u32 (z0, 1),
		z0 = svqadd (z0, 1))

/*
** qadd_1_u32_untied:
**	movprfx	z0, z1
**	uqadd	z0\.s, z0\.s, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_u32_untied, svuint32_t,
		z0 = svqadd_n_u32 (z1, 1),
		z0 = svqadd (z1, 1))

/*
** qadd_127_u32:
**	uqadd	z0\.s, z0\.s, #127
**	ret
*/
TEST_UNIFORM_Z (qadd_127_u32, svuint32_t,
		z0 = svqadd_n_u32 (z0, 127),
		z0 = svqadd (z0, 127))

/*
** qadd_128_u32:
**	uqadd	z0\.s, z0\.s, #128
**	ret
*/
TEST_UNIFORM_Z (qadd_128_u32, svuint32_t,
		z0 = svqadd_n_u32 (z0, 128),
		z0 = svqadd (z0, 128))

/*
** qadd_255_u32:
**	uqadd	z0\.s, z0\.s, #255
**	ret
*/
TEST_UNIFORM_Z (qadd_255_u32, svuint32_t,
		z0 = svqadd_n_u32 (z0, 255),
		z0 = svqadd (z0, 255))
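
/* The immediate form of UQADD encodes an unsigned 8-bit constant
   (0-255, optionally shifted left by 8 bits), so the constants above
   fold directly into the instruction.  The negative constants below
   fall outside that range and must be materialized into a register
   first; -1 can use a byte-granular MOV because its bit pattern is
   all-ones.  */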

/*
** qadd_m1_u32:
**	mov	(z[0-9]+)\.b, #-1
**	uqadd	z0\.s, (z0\.s, \1\.s|\1\.s, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_m1_u32, svuint32_t,
		z0 = svqadd_n_u32 (z0, -1),
		z0 = svqadd (z0, -1))

/*
** qadd_m127_u32:
**	mov	(z[0-9]+\.s), #-127
**	uqadd	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_m127_u32, svuint32_t,
		z0 = svqadd_n_u32 (z0, -127),
		z0 = svqadd (z0, -127))

/*
** qadd_m128_u32:
**	mov	(z[0-9]+\.s), #-128
**	uqadd	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_m128_u32, svuint32_t,
		z0 = svqadd_n_u32 (z0, -128),
		z0 = svqadd (z0, -128))
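
/* Merging forms (_m): inactive lanes keep the value of the first vector
   argument.  The predicated UQADD is destructive (the destination must
   be the same register as the first source), so the tied2 test needs a
   scratch copy of z0 and the untied tests seed z0 with MOVPRFX.  */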

/*
** qadd_u32_m_tied1:
**	uqadd	z0\.s, p0/m, z0\.s, z1\.s
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_m_tied1, svuint32_t,
		z0 = svqadd_u32_m (p0, z0, z1),
		z0 = svqadd_m (p0, z0, z1))

/*
** qadd_u32_m_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	uqadd	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_m_tied2, svuint32_t,
		z0 = svqadd_u32_m (p0, z1, z0),
		z0 = svqadd_m (p0, z1, z0))

/*
** qadd_u32_m_untied:
**	movprfx	z0, z1
**	uqadd	z0\.s, p0/m, z0\.s, z2\.s
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_m_untied, svuint32_t,
		z0 = svqadd_u32_m (p0, z1, z2),
		z0 = svqadd_m (p0, z1, z2))

/*
** qadd_w0_u32_m_tied1:
**	mov	(z[0-9]+\.s), w0
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_u32_m_tied1, svuint32_t, uint32_t,
		 z0 = svqadd_n_u32_m (p0, z0, x0),
		 z0 = svqadd_m (p0, z0, x0))

/*
** qadd_w0_u32_m_untied:
**	mov	(z[0-9]+\.s), w0
**	movprfx	z0, z1
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_u32_m_untied, svuint32_t, uint32_t,
		 z0 = svqadd_n_u32_m (p0, z1, x0),
		 z0 = svqadd_m (p0, z1, x0))

/*
** qadd_1_u32_m_tied1:
**	mov	(z[0-9]+\.s), #1
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_u32_m_tied1, svuint32_t,
		z0 = svqadd_n_u32_m (p0, z0, 1),
		z0 = svqadd_m (p0, z0, 1))

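/* Marked { xfail *-*-* }, i.e. expected to fail on all targets: the body
   below documents the ideal sequence rather than what GCC currently
   emits for this untied predicated-immediate case.  */
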
/*
** qadd_1_u32_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.s), #1
**	movprfx	z0, z1
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_u32_m_untied, svuint32_t,
		z0 = svqadd_n_u32_m (p0, z1, 1),
		z0 = svqadd_m (p0, z1, 1))

/*
** qadd_127_u32_m:
**	mov	(z[0-9]+\.s), #127
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_127_u32_m, svuint32_t,
		z0 = svqadd_n_u32_m (p0, z0, 127),
		z0 = svqadd_m (p0, z0, 127))

/*
** qadd_128_u32_m:
**	mov	(z[0-9]+\.s), #128
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_128_u32_m, svuint32_t,
		z0 = svqadd_n_u32_m (p0, z0, 128),
		z0 = svqadd_m (p0, z0, 128))

/*
** qadd_255_u32_m:
**	mov	(z[0-9]+\.s), #255
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_255_u32_m, svuint32_t,
		z0 = svqadd_n_u32_m (p0, z0, 255),
		z0 = svqadd_m (p0, z0, 255))

/*
** qadd_m1_u32_m:
**	mov	(z[0-9]+)\.b, #-1
**	uqadd	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_UNIFORM_Z (qadd_m1_u32_m, svuint32_t,
		z0 = svqadd_n_u32_m (p0, z0, -1),
		z0 = svqadd_m (p0, z0, -1))

/*
** qadd_m127_u32_m:
**	mov	(z[0-9]+\.s), #-127
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_m127_u32_m, svuint32_t,
		z0 = svqadd_n_u32_m (p0, z0, -127),
		z0 = svqadd_m (p0, z0, -127))

/*
** qadd_m128_u32_m:
**	mov	(z[0-9]+\.s), #-128
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_m128_u32_m, svuint32_t,
		z0 = svqadd_n_u32_m (p0, z0, -128),
		z0 = svqadd_m (p0, z0, -128))
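
/* Zeroing forms (_z): inactive lanes are set to zero, here via a
   predicated MOVPRFX (p0/z) ahead of the destructive UQADD.  For the
   untied tests, commutativity means MOVPRFX may start from either
   operand, so the patterns allow both orders.  */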

/*
** qadd_u32_z_tied1:
**	movprfx	z0\.s, p0/z, z0\.s
**	uqadd	z0\.s, p0/m, z0\.s, z1\.s
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_z_tied1, svuint32_t,
		z0 = svqadd_u32_z (p0, z0, z1),
		z0 = svqadd_z (p0, z0, z1))

/*
** qadd_u32_z_tied2:
**	movprfx	z0\.s, p0/z, z0\.s
**	uqadd	z0\.s, p0/m, z0\.s, z1\.s
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_z_tied2, svuint32_t,
		z0 = svqadd_u32_z (p0, z1, z0),
		z0 = svqadd_z (p0, z1, z0))

/*
** qadd_u32_z_untied:
** (
**	movprfx	z0\.s, p0/z, z1\.s
**	uqadd	z0\.s, p0/m, z0\.s, z2\.s
** |
**	movprfx	z0\.s, p0/z, z2\.s
**	uqadd	z0\.s, p0/m, z0\.s, z1\.s
** )
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_z_untied, svuint32_t,
		z0 = svqadd_u32_z (p0, z1, z2),
		z0 = svqadd_z (p0, z1, z2))

/*
** qadd_w0_u32_z_tied1:
**	mov	(z[0-9]+\.s), w0
**	movprfx	z0\.s, p0/z, z0\.s
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_u32_z_tied1, svuint32_t, uint32_t,
		 z0 = svqadd_n_u32_z (p0, z0, x0),
		 z0 = svqadd_z (p0, z0, x0))

/*
** qadd_w0_u32_z_untied:
**	mov	(z[0-9]+\.s), w0
** (
**	movprfx	z0\.s, p0/z, z1\.s
**	uqadd	z0\.s, p0/m, z0\.s, \1
** |
**	movprfx	z0\.s, p0/z, \1
**	uqadd	z0\.s, p0/m, z0\.s, z1\.s
** )
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_u32_z_untied, svuint32_t, uint32_t,
		 z0 = svqadd_n_u32_z (p0, z1, x0),
		 z0 = svqadd_z (p0, z1, x0))

/*
** qadd_1_u32_z_tied1:
**	mov	(z[0-9]+\.s), #1
**	movprfx	z0\.s, p0/z, z0\.s
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_u32_z_tied1, svuint32_t,
		z0 = svqadd_n_u32_z (p0, z0, 1),
		z0 = svqadd_z (p0, z0, 1))

/*
** qadd_1_u32_z_untied:
**	mov	(z[0-9]+\.s), #1
** (
**	movprfx	z0\.s, p0/z, z1\.s
**	uqadd	z0\.s, p0/m, z0\.s, \1
** |
**	movprfx	z0\.s, p0/z, \1
**	uqadd	z0\.s, p0/m, z0\.s, z1\.s
** )
**	ret
*/
TEST_UNIFORM_Z (qadd_1_u32_z_untied, svuint32_t,
		z0 = svqadd_n_u32_z (p0, z1, 1),
		z0 = svqadd_z (p0, z1, 1))

/*
** qadd_127_u32_z:
**	mov	(z[0-9]+\.s), #127
**	movprfx	z0\.s, p0/z, z0\.s
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_127_u32_z, svuint32_t,
		z0 = svqadd_n_u32_z (p0, z0, 127),
		z0 = svqadd_z (p0, z0, 127))

/*
** qadd_128_u32_z:
**	mov	(z[0-9]+\.s), #128
**	movprfx	z0\.s, p0/z, z0\.s
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_128_u32_z, svuint32_t,
		z0 = svqadd_n_u32_z (p0, z0, 128),
		z0 = svqadd_z (p0, z0, 128))

/*
** qadd_255_u32_z:
**	mov	(z[0-9]+\.s), #255
**	movprfx	z0\.s, p0/z, z0\.s
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_255_u32_z, svuint32_t,
		z0 = svqadd_n_u32_z (p0, z0, 255),
		z0 = svqadd_z (p0, z0, 255))

/*
** qadd_m1_u32_z:
**	mov	(z[0-9]+)\.b, #-1
**	movprfx	z0\.s, p0/z, z0\.s
**	uqadd	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_UNIFORM_Z (qadd_m1_u32_z, svuint32_t,
		z0 = svqadd_n_u32_z (p0, z0, -1),
		z0 = svqadd_z (p0, z0, -1))

/*
** qadd_m127_u32_z:
**	mov	(z[0-9]+\.s), #-127
**	movprfx	z0\.s, p0/z, z0\.s
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_m127_u32_z, svuint32_t,
		z0 = svqadd_n_u32_z (p0, z0, -127),
		z0 = svqadd_z (p0, z0, -127))

/*
** qadd_m128_u32_z:
**	mov	(z[0-9]+\.s), #-128
**	movprfx	z0\.s, p0/z, z0\.s
**	uqadd	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_m128_u32_z, svuint32_t,
		z0 = svqadd_n_u32_z (p0, z0, -128),
		z0 = svqadd_z (p0, z0, -128))
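
/* "Don't care" forms (_x): inactive lanes may take any value, so the
   compiler is free to drop the predicate and reuse the unpredicated or
   immediate UQADD; the expected code matches the unpredicated tests at
   the top of the file.  */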

/*
** qadd_u32_x_tied1:
**	uqadd	z0\.s, (z0\.s, z1\.s|z1\.s, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_x_tied1, svuint32_t,
		z0 = svqadd_u32_x (p0, z0, z1),
		z0 = svqadd_x (p0, z0, z1))

/*
** qadd_u32_x_tied2:
**	uqadd	z0\.s, (z0\.s, z1\.s|z1\.s, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_x_tied2, svuint32_t,
		z0 = svqadd_u32_x (p0, z1, z0),
		z0 = svqadd_x (p0, z1, z0))

/*
** qadd_u32_x_untied:
**	uqadd	z0\.s, (z1\.s, z2\.s|z2\.s, z1\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_u32_x_untied, svuint32_t,
		z0 = svqadd_u32_x (p0, z1, z2),
		z0 = svqadd_x (p0, z1, z2))

/*
** qadd_w0_u32_x_tied1:
**	mov	(z[0-9]+\.s), w0
**	uqadd	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_u32_x_tied1, svuint32_t, uint32_t,
		 z0 = svqadd_n_u32_x (p0, z0, x0),
		 z0 = svqadd_x (p0, z0, x0))

/*
** qadd_w0_u32_x_untied:
**	mov	(z[0-9]+\.s), w0
**	uqadd	z0\.s, (z1\.s, \1|\1, z1\.s)
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_u32_x_untied, svuint32_t, uint32_t,
		 z0 = svqadd_n_u32_x (p0, z1, x0),
		 z0 = svqadd_x (p0, z1, x0))

/*
** qadd_1_u32_x_tied1:
**	uqadd	z0\.s, z0\.s, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_u32_x_tied1, svuint32_t,
		z0 = svqadd_n_u32_x (p0, z0, 1),
		z0 = svqadd_x (p0, z0, 1))

/*
** qadd_1_u32_x_untied:
**	movprfx	z0, z1
**	uqadd	z0\.s, z0\.s, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_u32_x_untied, svuint32_t,
		z0 = svqadd_n_u32_x (p0, z1, 1),
		z0 = svqadd_x (p0, z1, 1))

/*
** qadd_127_u32_x:
**	uqadd	z0\.s, z0\.s, #127
**	ret
*/
TEST_UNIFORM_Z (qadd_127_u32_x, svuint32_t,
		z0 = svqadd_n_u32_x (p0, z0, 127),
		z0 = svqadd_x (p0, z0, 127))

/*
** qadd_128_u32_x:
**	uqadd	z0\.s, z0\.s, #128
**	ret
*/
TEST_UNIFORM_Z (qadd_128_u32_x, svuint32_t,
		z0 = svqadd_n_u32_x (p0, z0, 128),
		z0 = svqadd_x (p0, z0, 128))

/*
** qadd_255_u32_x:
**	uqadd	z0\.s, z0\.s, #255
**	ret
*/
TEST_UNIFORM_Z (qadd_255_u32_x, svuint32_t,
		z0 = svqadd_n_u32_x (p0, z0, 255),
		z0 = svqadd_x (p0, z0, 255))

/*
** qadd_m1_u32_x:
**	mov	(z[0-9]+)\.b, #-1
**	uqadd	z0\.s, (z0\.s, \1\.s|\1\.s, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_m1_u32_x, svuint32_t,
		z0 = svqadd_n_u32_x (p0, z0, -1),
		z0 = svqadd_x (p0, z0, -1))

/*
** qadd_m127_u32_x:
**	mov	(z[0-9]+\.s), #-127
**	uqadd	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_m127_u32_x, svuint32_t,
		z0 = svqadd_n_u32_x (p0, z0, -127),
		z0 = svqadd_x (p0, z0, -127))

/*
** qadd_m128_u32_x:
**	mov	(z[0-9]+\.s), #-128
**	uqadd	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (qadd_m128_u32_x, svuint32_t,
		z0 = svqadd_n_u32_x (p0, z0, -128),
		z0 = svqadd_x (p0, z0, -128))