gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/mad_s64.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
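/* Editorial note, not part of the original test.  check-function-bodies
   compares the assembly generated for each function against the
   regular-expression lines in the preceding "**" comment, keyed on the
   function name; the "-DCHECK_ASM" argument appears to restrict the check
   to compilations that define CHECK_ASM.  Each TEST_UNIFORM_Z /
   TEST_UNIFORM_ZX invocation (macros from test_sve_acle.h, not shown here)
   is assumed to define one such function, spelled once with the
   type-suffixed intrinsic (e.g. svmad_s64_m) and once with the overloaded
   form (e.g. svmad_m).  Suffixes: _m/_z/_x select merging, zeroing and
   "don't care" predication; _tied1/_tied2/_tied3 say which data argument
   shares z0 with the result, _untied means none does; the _n variants take
   a scalar addend, passed here either in x0 or as the immediate 11.  */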
/*
** mad_s64_m_tied1:
**	mad	z0\.d, p0/m, z1\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (mad_s64_m_tied1, svint64_t,
		z0 = svmad_s64_m (p0, z0, z1, z2),
		z0 = svmad_m (p0, z0, z1, z2))

/*
** mad_s64_m_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	mad	z0\.d, p0/m, \1, z2\.d
**	ret
*/
TEST_UNIFORM_Z (mad_s64_m_tied2, svint64_t,
		z0 = svmad_s64_m (p0, z1, z0, z2),
		z0 = svmad_m (p0, z1, z0, z2))

/*
** mad_s64_m_tied3:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	mad	z0\.d, p0/m, z2\.d, \1
**	ret
*/
TEST_UNIFORM_Z (mad_s64_m_tied3, svint64_t,
		z0 = svmad_s64_m (p0, z1, z2, z0),
		z0 = svmad_m (p0, z1, z2, z0))

/*
** mad_s64_m_untied:
**	movprfx	z0, z1
**	mad	z0\.d, p0/m, z2\.d, z3\.d
**	ret
*/
TEST_UNIFORM_Z (mad_s64_m_untied, svint64_t,
		z0 = svmad_s64_m (p0, z1, z2, z3),
		z0 = svmad_m (p0, z1, z2, z3))

/*
** mad_x0_s64_m_tied1:
**	mov	(z[0-9]+\.d), x0
**	mad	z0\.d, p0/m, z1\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (mad_x0_s64_m_tied1, svint64_t, int64_t,
		 z0 = svmad_n_s64_m (p0, z0, z1, x0),
		 z0 = svmad_m (p0, z0, z1, x0))

/*
** mad_x0_s64_m_untied:
**	mov	(z[0-9]+\.d), x0
**	movprfx	z0, z1
**	mad	z0\.d, p0/m, z2\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (mad_x0_s64_m_untied, svint64_t, int64_t,
		 z0 = svmad_n_s64_m (p0, z1, z2, x0),
		 z0 = svmad_m (p0, z1, z2, x0))

/*
** mad_11_s64_m_tied1:
**	mov	(z[0-9]+\.d), #11
**	mad	z0\.d, p0/m, z1\.d, \1
**	ret
*/
TEST_UNIFORM_Z (mad_11_s64_m_tied1, svint64_t,
		z0 = svmad_n_s64_m (p0, z0, z1, 11),
		z0 = svmad_m (p0, z0, z1, 11))

/*
** mad_11_s64_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.d), #11
**	movprfx	z0, z1
**	mad	z0\.d, p0/m, z2\.d, \1
**	ret
*/
TEST_UNIFORM_Z (mad_11_s64_m_untied, svint64_t,
		z0 = svmad_n_s64_m (p0, z1, z2, 11),
		z0 = svmad_m (p0, z1, z2, 11))

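/* Editorial note: in the _z (zeroing) tests below, MAD and MLA exist only
   as merging operations, so the expected code first zeroes the inactive
   lanes of z0 with "movprfx z0.d, p0/z, ..." and then applies the merging
   multiply-add.  MAD overwrites a multiplicand held in the destination,
   while MLA accumulates onto the addend held in the destination, which is
   why the patterns switch to MLA when z0 is tied to the addend and accept
   several equivalent sequences via the (...|...) alternatives in the
   untied cases.  */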
/*
** mad_s64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	mad	z0\.d, p0/m, z1\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (mad_s64_z_tied1, svint64_t,
		z0 = svmad_s64_z (p0, z0, z1, z2),
		z0 = svmad_z (p0, z0, z1, z2))

/*
** mad_s64_z_tied2:
**	movprfx	z0\.d, p0/z, z0\.d
**	mad	z0\.d, p0/m, z1\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (mad_s64_z_tied2, svint64_t,
		z0 = svmad_s64_z (p0, z1, z0, z2),
		z0 = svmad_z (p0, z1, z0, z2))

/*
** mad_s64_z_tied3:
**	movprfx	z0\.d, p0/z, z0\.d
**	mla	z0\.d, p0/m, z1\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (mad_s64_z_tied3, svint64_t,
		z0 = svmad_s64_z (p0, z1, z2, z0),
		z0 = svmad_z (p0, z1, z2, z0))

/*
** mad_s64_z_untied:
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	mad	z0\.d, p0/m, z2\.d, z3\.d
** |
**	movprfx	z0\.d, p0/z, z2\.d
**	mad	z0\.d, p0/m, z1\.d, z3\.d
** |
**	movprfx	z0\.d, p0/z, z3\.d
**	mla	z0\.d, p0/m, z1\.d, z2\.d
** )
**	ret
*/
TEST_UNIFORM_Z (mad_s64_z_untied, svint64_t,
		z0 = svmad_s64_z (p0, z1, z2, z3),
		z0 = svmad_z (p0, z1, z2, z3))

/*
** mad_x0_s64_z_tied1:
**	mov	(z[0-9]+\.d), x0
**	movprfx	z0\.d, p0/z, z0\.d
**	mad	z0\.d, p0/m, z1\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (mad_x0_s64_z_tied1, svint64_t, int64_t,
		 z0 = svmad_n_s64_z (p0, z0, z1, x0),
		 z0 = svmad_z (p0, z0, z1, x0))

/*
** mad_x0_s64_z_tied2:
**	mov	(z[0-9]+\.d), x0
**	movprfx	z0\.d, p0/z, z0\.d
**	mad	z0\.d, p0/m, z1\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (mad_x0_s64_z_tied2, svint64_t, int64_t,
		 z0 = svmad_n_s64_z (p0, z1, z0, x0),
		 z0 = svmad_z (p0, z1, z0, x0))

/*
** mad_x0_s64_z_untied:
**	mov	(z[0-9]+\.d), x0
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	mad	z0\.d, p0/m, z2\.d, \1
** |
**	movprfx	z0\.d, p0/z, z2\.d
**	mad	z0\.d, p0/m, z1\.d, \1
** |
**	movprfx	z0\.d, p0/z, \1
**	mla	z0\.d, p0/m, z1\.d, z2\.d
** )
**	ret
*/
TEST_UNIFORM_ZX (mad_x0_s64_z_untied, svint64_t, int64_t,
		 z0 = svmad_n_s64_z (p0, z1, z2, x0),
		 z0 = svmad_z (p0, z1, z2, x0))

/*
** mad_11_s64_z_tied1:
**	mov	(z[0-9]+\.d), #11
**	movprfx	z0\.d, p0/z, z0\.d
**	mad	z0\.d, p0/m, z1\.d, \1
**	ret
*/
TEST_UNIFORM_Z (mad_11_s64_z_tied1, svint64_t,
		z0 = svmad_n_s64_z (p0, z0, z1, 11),
		z0 = svmad_z (p0, z0, z1, 11))

/*
** mad_11_s64_z_tied2:
**	mov	(z[0-9]+\.d), #11
**	movprfx	z0\.d, p0/z, z0\.d
**	mad	z0\.d, p0/m, z1\.d, \1
**	ret
*/
TEST_UNIFORM_Z (mad_11_s64_z_tied2, svint64_t,
		z0 = svmad_n_s64_z (p0, z1, z0, 11),
		z0 = svmad_z (p0, z1, z0, 11))

/*
** mad_11_s64_z_untied:
**	mov	(z[0-9]+\.d), #11
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	mad	z0\.d, p0/m, z2\.d, \1
** |
**	movprfx	z0\.d, p0/z, z2\.d
**	mad	z0\.d, p0/m, z1\.d, \1
** |
**	movprfx	z0\.d, p0/z, \1
**	mla	z0\.d, p0/m, z1\.d, z2\.d
** )
**	ret
*/
TEST_UNIFORM_Z (mad_11_s64_z_untied, svint64_t,
		z0 = svmad_n_s64_z (p0, z1, z2, 11),
		z0 = svmad_z (p0, z1, z2, 11))

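/* Editorial note: the _x ("don't care") tests leave inactive lanes
   undefined, so no movprfx of z0 is needed when the result is tied to one
   of the inputs.  The untied vector case accepts an unpredicated movprfx
   followed by either MAD or MLA, while the scalar and immediate untied
   cases broadcast straight into z0 and use MLA.  */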
/*
** mad_s64_x_tied1:
**	mad	z0\.d, p0/m, z1\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (mad_s64_x_tied1, svint64_t,
		z0 = svmad_s64_x (p0, z0, z1, z2),
		z0 = svmad_x (p0, z0, z1, z2))

/*
** mad_s64_x_tied2:
**	mad	z0\.d, p0/m, z1\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (mad_s64_x_tied2, svint64_t,
		z0 = svmad_s64_x (p0, z1, z0, z2),
		z0 = svmad_x (p0, z1, z0, z2))

/*
** mad_s64_x_tied3:
**	mla	z0\.d, p0/m, z1\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (mad_s64_x_tied3, svint64_t,
		z0 = svmad_s64_x (p0, z1, z2, z0),
		z0 = svmad_x (p0, z1, z2, z0))

/*
** mad_s64_x_untied:
** (
**	movprfx	z0, z1
**	mad	z0\.d, p0/m, z2\.d, z3\.d
** |
**	movprfx	z0, z2
**	mad	z0\.d, p0/m, z1\.d, z3\.d
** |
**	movprfx	z0, z3
**	mla	z0\.d, p0/m, z1\.d, z2\.d
** )
**	ret
*/
TEST_UNIFORM_Z (mad_s64_x_untied, svint64_t,
		z0 = svmad_s64_x (p0, z1, z2, z3),
		z0 = svmad_x (p0, z1, z2, z3))

/*
** mad_x0_s64_x_tied1:
**	mov	(z[0-9]+\.d), x0
**	mad	z0\.d, p0/m, z1\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (mad_x0_s64_x_tied1, svint64_t, int64_t,
		 z0 = svmad_n_s64_x (p0, z0, z1, x0),
		 z0 = svmad_x (p0, z0, z1, x0))

/*
** mad_x0_s64_x_tied2:
**	mov	(z[0-9]+\.d), x0
**	mad	z0\.d, p0/m, z1\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (mad_x0_s64_x_tied2, svint64_t, int64_t,
		 z0 = svmad_n_s64_x (p0, z1, z0, x0),
		 z0 = svmad_x (p0, z1, z0, x0))

/*
** mad_x0_s64_x_untied:
**	mov	z0\.d, x0
**	mla	z0\.d, p0/m, z1\.d, z2\.d
**	ret
*/
TEST_UNIFORM_ZX (mad_x0_s64_x_untied, svint64_t, int64_t,
		 z0 = svmad_n_s64_x (p0, z1, z2, x0),
		 z0 = svmad_x (p0, z1, z2, x0))

/*
** mad_11_s64_x_tied1:
**	mov	(z[0-9]+\.d), #11
**	mad	z0\.d, p0/m, z1\.d, \1
**	ret
*/
TEST_UNIFORM_Z (mad_11_s64_x_tied1, svint64_t,
		z0 = svmad_n_s64_x (p0, z0, z1, 11),
		z0 = svmad_x (p0, z0, z1, 11))

/*
** mad_11_s64_x_tied2:
**	mov	(z[0-9]+\.d), #11
**	mad	z0\.d, p0/m, z1\.d, \1
**	ret
*/
TEST_UNIFORM_Z (mad_11_s64_x_tied2, svint64_t,
		z0 = svmad_n_s64_x (p0, z1, z0, 11),
		z0 = svmad_x (p0, z1, z0, 11))

/*
** mad_11_s64_x_untied:
**	mov	z0\.d, #11
**	mla	z0\.d, p0/m, z1\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (mad_11_s64_x_untied, svint64_t,
		z0 = svmad_n_s64_x (p0, z1, z2, 11),
		z0 = svmad_x (p0, z1, z2, 11))