/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

/*
** sub_u32_m_tied1:
**	sub	z0\.s, p0/m, z0\.s, z1\.s
**	ret
*/
TEST_UNIFORM_Z (sub_u32_m_tied1, svuint32_t,
		z0 = svsub_u32_m (p0, z0, z1),
		z0 = svsub_m (p0, z0, z1))

/*
** sub_u32_m_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	sub	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_UNIFORM_Z (sub_u32_m_tied2, svuint32_t,
		z0 = svsub_u32_m (p0, z1, z0),
		z0 = svsub_m (p0, z1, z0))

/*
** sub_u32_m_untied:
**	movprfx	z0, z1
**	sub	z0\.s, p0/m, z0\.s, z2\.s
**	ret
*/
TEST_UNIFORM_Z (sub_u32_m_untied, svuint32_t,
		z0 = svsub_u32_m (p0, z1, z2),
		z0 = svsub_m (p0, z1, z2))

/*
** sub_w0_u32_m_tied1:
**	mov	(z[0-9]+\.s), w0
**	sub	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (sub_w0_u32_m_tied1, svuint32_t, uint32_t,
		 z0 = svsub_n_u32_m (p0, z0, x0),
		 z0 = svsub_m (p0, z0, x0))

/*
** sub_w0_u32_m_untied:
**	mov	(z[0-9]+\.s), w0
**	movprfx	z0, z1
**	sub	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (sub_w0_u32_m_untied, svuint32_t, uint32_t,
		 z0 = svsub_n_u32_m (p0, z1, x0),
		 z0 = svsub_m (p0, z1, x0))

/*
** sub_1_u32_m_tied1:
**	mov	(z[0-9]+)\.b, #-1
**	add	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_UNIFORM_Z (sub_1_u32_m_tied1, svuint32_t,
		z0 = svsub_n_u32_m (p0, z0, 1),
		z0 = svsub_m (p0, z0, 1))

/*
** sub_1_u32_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+)\.b, #-1
**	movprfx	z0, z1
**	add	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_UNIFORM_Z (sub_1_u32_m_untied, svuint32_t,
		z0 = svsub_n_u32_m (p0, z1, 1),
		z0 = svsub_m (p0, z1, 1))

/*
** sub_m2_u32_m:
**	mov	(z[0-9]+\.s), #2
**	add	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_Z (sub_m2_u32_m, svuint32_t,
		z0 = svsub_n_u32_m (p0, z0, -2),
		z0 = svsub_m (p0, z0, -2))

/*
** sub_u32_z_tied1:
**	movprfx	z0\.s, p0/z, z0\.s
**	sub	z0\.s, p0/m, z0\.s, z1\.s
**	ret
*/
TEST_UNIFORM_Z (sub_u32_z_tied1, svuint32_t,
		z0 = svsub_u32_z (p0, z0, z1),
		z0 = svsub_z (p0, z0, z1))

/*
** sub_u32_z_tied2:
**	movprfx	z0\.s, p0/z, z0\.s
**	subr	z0\.s, p0/m, z0\.s, z1\.s
**	ret
*/
TEST_UNIFORM_Z (sub_u32_z_tied2, svuint32_t,
		z0 = svsub_u32_z (p0, z1, z0),
		z0 = svsub_z (p0, z1, z0))

/*
** sub_u32_z_untied:
** (
**	movprfx	z0\.s, p0/z, z1\.s
**	sub	z0\.s, p0/m, z0\.s, z2\.s
** |
**	movprfx	z0\.s, p0/z, z2\.s
**	subr	z0\.s, p0/m, z0\.s, z1\.s
** )
**	ret
*/
TEST_UNIFORM_Z (sub_u32_z_untied, svuint32_t,
		z0 = svsub_u32_z (p0, z1, z2),
		z0 = svsub_z (p0, z1, z2))

/*
** sub_w0_u32_z_tied1:
**	mov	(z[0-9]+\.s), w0
**	movprfx	z0\.s, p0/z, z0\.s
**	sub	z0\.s, p0/m, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (sub_w0_u32_z_tied1, svuint32_t, uint32_t,
		 z0 = svsub_n_u32_z (p0, z0, x0),
		 z0 = svsub_z (p0, z0, x0))

/*
** sub_w0_u32_z_untied:
**	mov	(z[0-9]+\.s), w0
** (
**	movprfx	z0\.s, p0/z, z1\.s
**	sub	z0\.s, p0/m, z0\.s, \1
** |
**	movprfx	z0\.s, p0/z, \1
**	subr	z0\.s, p0/m, z0\.s, z1\.s
** )
**	ret
*/
TEST_UNIFORM_ZX (sub_w0_u32_z_untied, svuint32_t, uint32_t,
		 z0 = svsub_n_u32_z (p0, z1, x0),
		 z0 = svsub_z (p0, z1, x0))

/*
** sub_1_u32_z_tied1:
**	mov	(z[0-9]+)\.b, #-1
**	movprfx	z0\.s, p0/z, z0\.s
**	add	z0\.s, p0/m, z0\.s, \1\.s
**	ret
*/
TEST_UNIFORM_Z (sub_1_u32_z_tied1, svuint32_t,
		z0 = svsub_n_u32_z (p0, z0, 1),
		z0 = svsub_z (p0, z0, 1))

/*
** sub_1_u32_z_untied:
**	mov	(z[0-9]+)\.b, #-1
** (
**	movprfx	z0\.s, p0/z, z1\.s
**	add	z0\.s, p0/m, z0\.s, \1\.s
** |
**	movprfx	z0\.s, p0/z, \1\.s
**	add	z0\.s, p0/m, z0\.s, z1\.s
** )
**	ret
*/
TEST_UNIFORM_Z (sub_1_u32_z_untied, svuint32_t,
		z0 = svsub_n_u32_z (p0, z1, 1),
		z0 = svsub_z (p0, z1, 1))

/*
** sub_u32_x_tied1:
**	sub	z0\.s, z0\.s, z1\.s
**	ret
*/
TEST_UNIFORM_Z (sub_u32_x_tied1, svuint32_t,
		z0 = svsub_u32_x (p0, z0, z1),
		z0 = svsub_x (p0, z0, z1))

/*
** sub_u32_x_tied2:
**	sub	z0\.s, z1\.s, z0\.s
**	ret
*/
TEST_UNIFORM_Z (sub_u32_x_tied2, svuint32_t,
		z0 = svsub_u32_x (p0, z1, z0),
		z0 = svsub_x (p0, z1, z0))

/*
** sub_u32_x_untied:
**	sub	z0\.s, z1\.s, z2\.s
**	ret
*/
TEST_UNIFORM_Z (sub_u32_x_untied, svuint32_t,
		z0 = svsub_u32_x (p0, z1, z2),
		z0 = svsub_x (p0, z1, z2))

/*
** sub_w0_u32_x_tied1:
**	mov	(z[0-9]+\.s), w0
**	sub	z0\.s, z0\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (sub_w0_u32_x_tied1, svuint32_t, uint32_t,
		 z0 = svsub_n_u32_x (p0, z0, x0),
		 z0 = svsub_x (p0, z0, x0))

/*
** sub_w0_u32_x_untied:
**	mov	(z[0-9]+\.s), w0
**	sub	z0\.s, z1\.s, \1
**	ret
*/
TEST_UNIFORM_ZX (sub_w0_u32_x_untied, svuint32_t, uint32_t,
		 z0 = svsub_n_u32_x (p0, z1, x0),
		 z0 = svsub_x (p0, z1, x0))

/*
** sub_1_u32_x_tied1:
**	sub	z0\.s, z0\.s, #1
**	ret
*/
TEST_UNIFORM_Z (sub_1_u32_x_tied1, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, 1),
		z0 = svsub_x (p0, z0, 1))

/*
** sub_1_u32_x_untied:
**	movprfx	z0, z1
**	sub	z0\.s, z0\.s, #1
**	ret
*/
TEST_UNIFORM_Z (sub_1_u32_x_untied, svuint32_t,
		z0 = svsub_n_u32_x (p0, z1, 1),
		z0 = svsub_x (p0, z1, 1))

/*
** sub_127_u32_x:
**	sub	z0\.s, z0\.s, #127
**	ret
*/
TEST_UNIFORM_Z (sub_127_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, 127),
		z0 = svsub_x (p0, z0, 127))

/*
** sub_128_u32_x:
**	sub	z0\.s, z0\.s, #128
**	ret
*/
TEST_UNIFORM_Z (sub_128_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, 128),
		z0 = svsub_x (p0, z0, 128))

/*
** sub_255_u32_x:
**	sub	z0\.s, z0\.s, #255
**	ret
*/
TEST_UNIFORM_Z (sub_255_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, 255),
		z0 = svsub_x (p0, z0, 255))

/*
** sub_256_u32_x:
**	sub	z0\.s, z0\.s, #256
**	ret
*/
TEST_UNIFORM_Z (sub_256_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, 256),
		z0 = svsub_x (p0, z0, 256))

/*
** sub_511_u32_x:
**	mov	(z[0-9]+\.s), #-511
**	add	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (sub_511_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, 511),
		z0 = svsub_x (p0, z0, 511))

/*
** sub_512_u32_x:
**	sub	z0\.s, z0\.s, #512
**	ret
*/
TEST_UNIFORM_Z (sub_512_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, 512),
		z0 = svsub_x (p0, z0, 512))

/*
** sub_65280_u32_x:
**	sub	z0\.s, z0\.s, #65280
**	ret
*/
TEST_UNIFORM_Z (sub_65280_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, 0xff00),
		z0 = svsub_x (p0, z0, 0xff00))

/*
** sub_65535_u32_x:
**	mov	(z[0-9]+\.s), #-65535
**	add	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (sub_65535_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, 65535),
		z0 = svsub_x (p0, z0, 65535))

/*
** sub_65536_u32_x:
**	mov	(z[0-9]+\.s), #-65536
**	add	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (sub_65536_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, 65536),
		z0 = svsub_x (p0, z0, 65536))

/*
** sub_m1_u32_x:
**	add	z0\.s, z0\.s, #1
**	ret
*/
TEST_UNIFORM_Z (sub_m1_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, -1),
		z0 = svsub_x (p0, z0, -1))

/*
** sub_m127_u32_x:
**	add	z0\.s, z0\.s, #127
**	ret
*/
TEST_UNIFORM_Z (sub_m127_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, -127),
		z0 = svsub_x (p0, z0, -127))

/*
** sub_m128_u32_x:
**	add	z0\.s, z0\.s, #128
**	ret
*/
TEST_UNIFORM_Z (sub_m128_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, -128),
		z0 = svsub_x (p0, z0, -128))

/*
** sub_m255_u32_x:
**	add	z0\.s, z0\.s, #255
**	ret
*/
TEST_UNIFORM_Z (sub_m255_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, -255),
		z0 = svsub_x (p0, z0, -255))

/*
** sub_m256_u32_x:
**	add	z0\.s, z0\.s, #256
**	ret
*/
TEST_UNIFORM_Z (sub_m256_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, -256),
		z0 = svsub_x (p0, z0, -256))

/*
** sub_m511_u32_x:
**	mov	(z[0-9]+\.s), #511
**	add	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (sub_m511_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, -511),
		z0 = svsub_x (p0, z0, -511))

/*
** sub_m512_u32_x:
**	add	z0\.s, z0\.s, #512
**	ret
*/
TEST_UNIFORM_Z (sub_m512_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, -512),
		z0 = svsub_x (p0, z0, -512))

/*
** sub_m32768_u32_x:
**	add	z0\.s, z0\.s, #32768
**	ret
*/
TEST_UNIFORM_Z (sub_m32768_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, -0x8000),
		z0 = svsub_x (p0, z0, -0x8000))

/*
** sub_m65280_u32_x:
**	add	z0\.s, z0\.s, #65280
**	ret
*/
TEST_UNIFORM_Z (sub_m65280_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, -0xff00),
		z0 = svsub_x (p0, z0, -0xff00))

/*
** sub_m65535_u32_x:
**	mov	(z[0-9]+\.s), #65535
**	add	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (sub_m65535_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, -65535),
		z0 = svsub_x (p0, z0, -65535))

/*
** sub_m65536_u32_x:
**	mov	(z[0-9]+\.s), #65536
**	add	z0\.s, (z0\.s, \1|\1, z0\.s)
**	ret
*/
TEST_UNIFORM_Z (sub_m65536_u32_x, svuint32_t,
		z0 = svsub_n_u32_x (p0, z0, -65536),
		z0 = svsub_x (p0, z0, -65536))