gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/qadd_s16.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
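
/* Each test below pairs an expected assembly body (the "**" comment lines,
   checked by check-function-bodies) with a TEST_UNIFORM_Z or TEST_UNIFORM_ZX
   invocation from test_sve_acle.h, which wraps the statement in a function
   of the given name.  The first statement uses the explicitly suffixed
   intrinsic and the second the overloaded form; which of the two is compiled
   depends on macros defined by the test harness.  "_tied" tests reuse an
   input register as the result, "_untied" tests do not.  */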

/*
** qadd_s16_tied1:
**	sqadd	z0\.h, (z0\.h, z1\.h|z1\.h, z0\.h)
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_tied1, svint16_t,
		z0 = svqadd_s16 (z0, z1),
		z0 = svqadd (z0, z1))

/*
** qadd_s16_tied2:
**	sqadd	z0\.h, (z0\.h, z1\.h|z1\.h, z0\.h)
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_tied2, svint16_t,
		z0 = svqadd_s16 (z1, z0),
		z0 = svqadd (z1, z0))

/*
** qadd_s16_untied:
**	sqadd	z0\.h, (z1\.h, z2\.h|z2\.h, z1\.h)
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_untied, svint16_t,
		z0 = svqadd_s16 (z1, z2),
		z0 = svqadd (z1, z2))

/*
** qadd_w0_s16_tied1:
**	mov	(z[0-9]+\.h), w0
**	sqadd	z0\.h, (z0\.h, \1|\1, z0\.h)
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_s16_tied1, svint16_t, int16_t,
		 z0 = svqadd_n_s16 (z0, x0),
		 z0 = svqadd (z0, x0))

/*
** qadd_w0_s16_untied:
**	mov	(z[0-9]+\.h), w0
**	sqadd	z0\.h, (z1\.h, \1|\1, z1\.h)
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_s16_untied, svint16_t, int16_t,
		 z0 = svqadd_n_s16 (z1, x0),
		 z0 = svqadd (z1, x0))

/*
** qadd_1_s16_tied1:
**	sqadd	z0\.h, z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_s16_tied1, svint16_t,
		z0 = svqadd_n_s16 (z0, 1),
		z0 = svqadd (z0, 1))

/*
** qadd_1_s16_untied:
**	movprfx	z0, z1
**	sqadd	z0\.h, z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_s16_untied, svint16_t,
		z0 = svqadd_n_s16 (z1, 1),
		z0 = svqadd (z1, 1))

/*
** qadd_127_s16:
**	sqadd	z0\.h, z0\.h, #127
**	ret
*/
TEST_UNIFORM_Z (qadd_127_s16, svint16_t,
		z0 = svqadd_n_s16 (z0, 127),
		z0 = svqadd (z0, 127))

/*
** qadd_128_s16:
**	sqadd	z0\.h, z0\.h, #128
**	ret
*/
TEST_UNIFORM_Z (qadd_128_s16, svint16_t,
		z0 = svqadd_n_s16 (z0, 128),
		z0 = svqadd (z0, 128))

/*
** qadd_255_s16:
**	sqadd	z0\.h, z0\.h, #255
**	ret
*/
TEST_UNIFORM_Z (qadd_255_s16, svint16_t,
		z0 = svqadd_n_s16 (z0, 255),
		z0 = svqadd (z0, 255))

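/* The SQADD immediate encoding only accepts an unsigned 8-bit constant
   (optionally shifted left by 8), so adding a negative constant is expected
   to be emitted as SQSUB of the corresponding positive value, as in the
   patterns below.  */
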
/*
** qadd_m1_s16:
**	sqsub	z0\.h, z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_m1_s16, svint16_t,
		z0 = svqadd_n_s16 (z0, -1),
		z0 = svqadd (z0, -1))

/*
** qadd_m127_s16:
**	sqsub	z0\.h, z0\.h, #127
**	ret
*/
TEST_UNIFORM_Z (qadd_m127_s16, svint16_t,
		z0 = svqadd_n_s16 (z0, -127),
		z0 = svqadd (z0, -127))

/*
** qadd_m128_s16:
**	sqsub	z0\.h, z0\.h, #128
**	ret
*/
TEST_UNIFORM_Z (qadd_m128_s16, svint16_t,
		z0 = svqadd_n_s16 (z0, -128),
		z0 = svqadd (z0, -128))

/*
** qadd_s16_m_tied1:
**	sqadd	z0\.h, p0/m, z0\.h, z1\.h
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_m_tied1, svint16_t,
		z0 = svqadd_s16_m (p0, z0, z1),
		z0 = svqadd_m (p0, z0, z1))

/*
** qadd_s16_m_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	sqadd	z0\.h, p0/m, z0\.h, \1\.h
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_m_tied2, svint16_t,
		z0 = svqadd_s16_m (p0, z1, z0),
		z0 = svqadd_m (p0, z1, z0))

/*
** qadd_s16_m_untied:
**	movprfx	z0, z1
**	sqadd	z0\.h, p0/m, z0\.h, z2\.h
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_m_untied, svint16_t,
		z0 = svqadd_s16_m (p0, z1, z2),
		z0 = svqadd_m (p0, z1, z2))

/*
** qadd_w0_s16_m_tied1:
**	mov	(z[0-9]+\.h), w0
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_s16_m_tied1, svint16_t, int16_t,
		 z0 = svqadd_n_s16_m (p0, z0, x0),
		 z0 = svqadd_m (p0, z0, x0))

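/* The { xfail *-*-* } below records that this body check is expected to
   fail on all targets: the ideal mov + movprfx sequence in the pattern is
   not currently generated (likewise for qadd_1_s16_m_untied further down).  */
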
/*
** qadd_w0_s16_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.h), w0
**	movprfx	z0, z1
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_s16_m_untied, svint16_t, int16_t,
		 z0 = svqadd_n_s16_m (p0, z1, x0),
		 z0 = svqadd_m (p0, z1, x0))

/*
** qadd_1_s16_m_tied1:
**	mov	(z[0-9]+\.h), #1
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_s16_m_tied1, svint16_t,
		z0 = svqadd_n_s16_m (p0, z0, 1),
		z0 = svqadd_m (p0, z0, 1))

/*
** qadd_1_s16_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.h), #1
**	movprfx	z0, z1
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_s16_m_untied, svint16_t,
		z0 = svqadd_n_s16_m (p0, z1, 1),
		z0 = svqadd_m (p0, z1, 1))

/*
** qadd_127_s16_m:
**	mov	(z[0-9]+\.h), #127
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_127_s16_m, svint16_t,
		z0 = svqadd_n_s16_m (p0, z0, 127),
		z0 = svqadd_m (p0, z0, 127))

/*
** qadd_128_s16_m:
**	mov	(z[0-9]+\.h), #128
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_128_s16_m, svint16_t,
		z0 = svqadd_n_s16_m (p0, z0, 128),
		z0 = svqadd_m (p0, z0, 128))

/*
** qadd_255_s16_m:
**	mov	(z[0-9]+\.h), #255
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_255_s16_m, svint16_t,
		z0 = svqadd_n_s16_m (p0, z0, 255),
		z0 = svqadd_m (p0, z0, 255))

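/* For -1 the all-ones vector is expected to be built with a byte-granularity
   mov, so the pattern captures the register name without a size suffix and
   re-applies .h at the use site.  */
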
/*
** qadd_m1_s16_m:
**	mov	(z[0-9]+)\.b, #-1
**	sqadd	z0\.h, p0/m, z0\.h, \1\.h
**	ret
*/
TEST_UNIFORM_Z (qadd_m1_s16_m, svint16_t,
		z0 = svqadd_n_s16_m (p0, z0, -1),
		z0 = svqadd_m (p0, z0, -1))

/*
** qadd_m127_s16_m:
**	mov	(z[0-9]+\.h), #-127
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_m127_s16_m, svint16_t,
		z0 = svqadd_n_s16_m (p0, z0, -127),
		z0 = svqadd_m (p0, z0, -127))

/*
** qadd_m128_s16_m:
**	mov	(z[0-9]+\.h), #-128
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_m128_s16_m, svint16_t,
		z0 = svqadd_n_s16_m (p0, z0, -128),
		z0 = svqadd_m (p0, z0, -128))

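/* The zeroing (_z) forms are expected to start with a predicated movprfx
   (p0/z) that zeroes the inactive lanes before the merging SQADD; since
   saturating addition is commutative, the untied patterns accept either
   operand order.  */
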
/*
** qadd_s16_z_tied1:
**	movprfx	z0\.h, p0/z, z0\.h
**	sqadd	z0\.h, p0/m, z0\.h, z1\.h
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_z_tied1, svint16_t,
		z0 = svqadd_s16_z (p0, z0, z1),
		z0 = svqadd_z (p0, z0, z1))

/*
** qadd_s16_z_tied2:
**	movprfx	z0\.h, p0/z, z0\.h
**	sqadd	z0\.h, p0/m, z0\.h, z1\.h
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_z_tied2, svint16_t,
		z0 = svqadd_s16_z (p0, z1, z0),
		z0 = svqadd_z (p0, z1, z0))

/*
** qadd_s16_z_untied:
** (
**	movprfx	z0\.h, p0/z, z1\.h
**	sqadd	z0\.h, p0/m, z0\.h, z2\.h
** |
**	movprfx	z0\.h, p0/z, z2\.h
**	sqadd	z0\.h, p0/m, z0\.h, z1\.h
** )
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_z_untied, svint16_t,
		z0 = svqadd_s16_z (p0, z1, z2),
		z0 = svqadd_z (p0, z1, z2))

/*
** qadd_w0_s16_z_tied1:
**	mov	(z[0-9]+\.h), w0
**	movprfx	z0\.h, p0/z, z0\.h
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_s16_z_tied1, svint16_t, int16_t,
		 z0 = svqadd_n_s16_z (p0, z0, x0),
		 z0 = svqadd_z (p0, z0, x0))

/*
** qadd_w0_s16_z_untied:
**	mov	(z[0-9]+\.h), w0
** (
**	movprfx	z0\.h, p0/z, z1\.h
**	sqadd	z0\.h, p0/m, z0\.h, \1
** |
**	movprfx	z0\.h, p0/z, \1
**	sqadd	z0\.h, p0/m, z0\.h, z1\.h
** )
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_s16_z_untied, svint16_t, int16_t,
		 z0 = svqadd_n_s16_z (p0, z1, x0),
		 z0 = svqadd_z (p0, z1, x0))

/*
** qadd_1_s16_z_tied1:
**	mov	(z[0-9]+\.h), #1
**	movprfx	z0\.h, p0/z, z0\.h
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_s16_z_tied1, svint16_t,
		z0 = svqadd_n_s16_z (p0, z0, 1),
		z0 = svqadd_z (p0, z0, 1))

/*
** qadd_1_s16_z_untied:
**	mov	(z[0-9]+\.h), #1
** (
**	movprfx	z0\.h, p0/z, z1\.h
**	sqadd	z0\.h, p0/m, z0\.h, \1
** |
**	movprfx	z0\.h, p0/z, \1
**	sqadd	z0\.h, p0/m, z0\.h, z1\.h
** )
**	ret
*/
TEST_UNIFORM_Z (qadd_1_s16_z_untied, svint16_t,
		z0 = svqadd_n_s16_z (p0, z1, 1),
		z0 = svqadd_z (p0, z1, 1))

/*
** qadd_127_s16_z:
**	mov	(z[0-9]+\.h), #127
**	movprfx	z0\.h, p0/z, z0\.h
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_127_s16_z, svint16_t,
		z0 = svqadd_n_s16_z (p0, z0, 127),
		z0 = svqadd_z (p0, z0, 127))

/*
** qadd_128_s16_z:
**	mov	(z[0-9]+\.h), #128
**	movprfx	z0\.h, p0/z, z0\.h
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_128_s16_z, svint16_t,
		z0 = svqadd_n_s16_z (p0, z0, 128),
		z0 = svqadd_z (p0, z0, 128))

/*
** qadd_255_s16_z:
**	mov	(z[0-9]+\.h), #255
**	movprfx	z0\.h, p0/z, z0\.h
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_255_s16_z, svint16_t,
		z0 = svqadd_n_s16_z (p0, z0, 255),
		z0 = svqadd_z (p0, z0, 255))

/*
** qadd_m1_s16_z:
**	mov	(z[0-9]+)\.b, #-1
**	movprfx	z0\.h, p0/z, z0\.h
**	sqadd	z0\.h, p0/m, z0\.h, \1\.h
**	ret
*/
TEST_UNIFORM_Z (qadd_m1_s16_z, svint16_t,
		z0 = svqadd_n_s16_z (p0, z0, -1),
		z0 = svqadd_z (p0, z0, -1))

/*
** qadd_m127_s16_z:
**	mov	(z[0-9]+\.h), #-127
**	movprfx	z0\.h, p0/z, z0\.h
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_m127_s16_z, svint16_t,
		z0 = svqadd_n_s16_z (p0, z0, -127),
		z0 = svqadd_z (p0, z0, -127))

/*
** qadd_m128_s16_z:
**	mov	(z[0-9]+\.h), #-128
**	movprfx	z0\.h, p0/z, z0\.h
**	sqadd	z0\.h, p0/m, z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (qadd_m128_s16_z, svint16_t,
		z0 = svqadd_n_s16_z (p0, z0, -128),
		z0 = svqadd_z (p0, z0, -128))

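/* The "don't care" (_x) forms are free to use the unpredicated encodings,
   so these patterns match the same SQADD/SQSUB sequences as the
   unpredicated tests at the top of the file.  */
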
/*
** qadd_s16_x_tied1:
**	sqadd	z0\.h, (z0\.h, z1\.h|z1\.h, z0\.h)
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_x_tied1, svint16_t,
		z0 = svqadd_s16_x (p0, z0, z1),
		z0 = svqadd_x (p0, z0, z1))

/*
** qadd_s16_x_tied2:
**	sqadd	z0\.h, (z0\.h, z1\.h|z1\.h, z0\.h)
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_x_tied2, svint16_t,
		z0 = svqadd_s16_x (p0, z1, z0),
		z0 = svqadd_x (p0, z1, z0))

/*
** qadd_s16_x_untied:
**	sqadd	z0\.h, (z1\.h, z2\.h|z2\.h, z1\.h)
**	ret
*/
TEST_UNIFORM_Z (qadd_s16_x_untied, svint16_t,
		z0 = svqadd_s16_x (p0, z1, z2),
		z0 = svqadd_x (p0, z1, z2))

/*
** qadd_w0_s16_x_tied1:
**	mov	(z[0-9]+\.h), w0
**	sqadd	z0\.h, (z0\.h, \1|\1, z0\.h)
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_s16_x_tied1, svint16_t, int16_t,
		 z0 = svqadd_n_s16_x (p0, z0, x0),
		 z0 = svqadd_x (p0, z0, x0))

/*
** qadd_w0_s16_x_untied:
**	mov	(z[0-9]+\.h), w0
**	sqadd	z0\.h, (z1\.h, \1|\1, z1\.h)
**	ret
*/
TEST_UNIFORM_ZX (qadd_w0_s16_x_untied, svint16_t, int16_t,
		 z0 = svqadd_n_s16_x (p0, z1, x0),
		 z0 = svqadd_x (p0, z1, x0))

/*
** qadd_1_s16_x_tied1:
**	sqadd	z0\.h, z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_s16_x_tied1, svint16_t,
		z0 = svqadd_n_s16_x (p0, z0, 1),
		z0 = svqadd_x (p0, z0, 1))

/*
** qadd_1_s16_x_untied:
**	movprfx	z0, z1
**	sqadd	z0\.h, z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_s16_x_untied, svint16_t,
		z0 = svqadd_n_s16_x (p0, z1, 1),
		z0 = svqadd_x (p0, z1, 1))

/*
** qadd_127_s16_x:
**	sqadd	z0\.h, z0\.h, #127
**	ret
*/
TEST_UNIFORM_Z (qadd_127_s16_x, svint16_t,
		z0 = svqadd_n_s16_x (p0, z0, 127),
		z0 = svqadd_x (p0, z0, 127))

/*
** qadd_128_s16_x:
**	sqadd	z0\.h, z0\.h, #128
**	ret
*/
TEST_UNIFORM_Z (qadd_128_s16_x, svint16_t,
		z0 = svqadd_n_s16_x (p0, z0, 128),
		z0 = svqadd_x (p0, z0, 128))

/*
** qadd_255_s16_x:
**	sqadd	z0\.h, z0\.h, #255
**	ret
*/
TEST_UNIFORM_Z (qadd_255_s16_x, svint16_t,
		z0 = svqadd_n_s16_x (p0, z0, 255),
		z0 = svqadd_x (p0, z0, 255))

/*
** qadd_m1_s16_x:
**	sqsub	z0\.h, z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_m1_s16_x, svint16_t,
		z0 = svqadd_n_s16_x (p0, z0, -1),
		z0 = svqadd_x (p0, z0, -1))

/*
** qadd_m127_s16_x:
**	sqsub	z0\.h, z0\.h, #127
**	ret
*/
TEST_UNIFORM_Z (qadd_m127_s16_x, svint16_t,
		z0 = svqadd_n_s16_x (p0, z0, -127),
		z0 = svqadd_x (p0, z0, -127))

/*
** qadd_m128_s16_x:
**	sqsub	z0\.h, z0\.h, #128
**	ret
*/
TEST_UNIFORM_Z (qadd_m128_s16_x, svint16_t,
		z0 = svqadd_n_s16_x (p0, z0, -128),
		z0 = svqadd_x (p0, z0, -128))