gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/rhadd_s64.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

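/* Each TEST_UNIFORM_Z/TEST_UNIFORM_ZX invocation below expands (via
   test_sve_acle.h) to a function whose compiled body must match the
   preceding "**" assembly pattern.  "_tied" tests reuse an input
   register as the result, "_untied" tests do not; the _m/_z/_x
   suffixes select merging, zeroing and "don't care" predication.
   As a rough scalar sketch (a comment only, not part of the test),
   SRHADD is a signed rounding halving add per active lane:

     result = (int64_t) (((__int128) a + b + 1) >> 1);

   where the widening to 128 bits stands in for the instruction's
   overflow-free intermediate.  */
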
/*
** rhadd_s64_m_tied1:
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_m_tied1, svint64_t,
		z0 = svrhadd_s64_m (p0, z0, z1),
		z0 = svrhadd_m (p0, z0, z1))

/*
** rhadd_s64_m_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_m_tied2, svint64_t,
		z0 = svrhadd_s64_m (p0, z1, z0),
		z0 = svrhadd_m (p0, z1, z0))

/*
** rhadd_s64_m_untied:
**	movprfx	z0, z1
**	srhadd	z0\.d, p0/m, z0\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_m_untied, svint64_t,
		z0 = svrhadd_s64_m (p0, z1, z2),
		z0 = svrhadd_m (p0, z1, z2))

/*
** rhadd_x0_s64_m_tied1:
**	mov	(z[0-9]+\.d), x0
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_m_tied1, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_m (p0, z0, x0),
		 z0 = svrhadd_m (p0, z0, x0))

/*
** rhadd_x0_s64_m_untied:
**	mov	(z[0-9]+\.d), x0
**	movprfx	z0, z1
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_m_untied, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_m (p0, z1, x0),
		 z0 = svrhadd_m (p0, z1, x0))

/*
** rhadd_11_s64_m_tied1:
**	mov	(z[0-9]+\.d), #11
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_m_tied1, svint64_t,
		z0 = svrhadd_n_s64_m (p0, z0, 11),
		z0 = svrhadd_m (p0, z0, 11))

/*
** rhadd_11_s64_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.d), #11
**	movprfx	z0, z1
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_m_untied, svint64_t,
		z0 = svrhadd_n_s64_m (p0, z1, 11),
		z0 = svrhadd_m (p0, z1, 11))

/*
** rhadd_s64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_z_tied1, svint64_t,
		z0 = svrhadd_s64_z (p0, z0, z1),
		z0 = svrhadd_z (p0, z0, z1))

/*
** rhadd_s64_z_tied2:
**	movprfx	z0\.d, p0/z, z0\.d
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_z_tied2, svint64_t,
		z0 = svrhadd_s64_z (p0, z1, z0),
		z0 = svrhadd_z (p0, z1, z0))

/*
** rhadd_s64_z_untied:
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	srhadd	z0\.d, p0/m, z0\.d, z2\.d
** |
**	movprfx	z0\.d, p0/z, z2\.d
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_z_untied, svint64_t,
		z0 = svrhadd_s64_z (p0, z1, z2),
		z0 = svrhadd_z (p0, z1, z2))

/*
** rhadd_x0_s64_z_tied1:
**	mov	(z[0-9]+\.d), x0
**	movprfx	z0\.d, p0/z, z0\.d
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_z_tied1, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_z (p0, z0, x0),
		 z0 = svrhadd_z (p0, z0, x0))

/*
** rhadd_x0_s64_z_untied:
**	mov	(z[0-9]+\.d), x0
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	srhadd	z0\.d, p0/m, z0\.d, \1
** |
**	movprfx	z0\.d, p0/z, \1
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_z_untied, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_z (p0, z1, x0),
		 z0 = svrhadd_z (p0, z1, x0))

/*
** rhadd_11_s64_z_tied1:
**	mov	(z[0-9]+\.d), #11
**	movprfx	z0\.d, p0/z, z0\.d
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_z_tied1, svint64_t,
		z0 = svrhadd_n_s64_z (p0, z0, 11),
		z0 = svrhadd_z (p0, z0, 11))

/*
** rhadd_11_s64_z_untied:
**	mov	(z[0-9]+\.d), #11
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	srhadd	z0\.d, p0/m, z0\.d, \1
** |
**	movprfx	z0\.d, p0/z, \1
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_z_untied, svint64_t,
		z0 = svrhadd_n_s64_z (p0, z1, 11),
		z0 = svrhadd_z (p0, z1, 11))

/*
** rhadd_s64_x_tied1:
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_x_tied1, svint64_t,
		z0 = svrhadd_s64_x (p0, z0, z1),
		z0 = svrhadd_x (p0, z0, z1))

/*
** rhadd_s64_x_tied2:
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_x_tied2, svint64_t,
		z0 = svrhadd_s64_x (p0, z1, z0),
		z0 = svrhadd_x (p0, z1, z0))

/*
** rhadd_s64_x_untied:
** (
**	movprfx	z0, z1
**	srhadd	z0\.d, p0/m, z0\.d, z2\.d
** |
**	movprfx	z0, z2
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_x_untied, svint64_t,
		z0 = svrhadd_s64_x (p0, z1, z2),
		z0 = svrhadd_x (p0, z1, z2))

/*
** rhadd_x0_s64_x_tied1:
**	mov	(z[0-9]+\.d), x0
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_x_tied1, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_x (p0, z0, x0),
		 z0 = svrhadd_x (p0, z0, x0))

/*
** rhadd_x0_s64_x_untied:
**	mov	z0\.d, x0
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_x_untied, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_x (p0, z1, x0),
		 z0 = svrhadd_x (p0, z1, x0))

/*
** rhadd_11_s64_x_tied1:
**	mov	(z[0-9]+\.d), #11
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_x_tied1, svint64_t,
		z0 = svrhadd_n_s64_x (p0, z0, 11),
		z0 = svrhadd_x (p0, z0, 11))

/*
** rhadd_11_s64_x_untied:
**	mov	z0\.d, #11
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_x_untied, svint64_t,
		z0 = svrhadd_n_s64_x (p0, z1, 11),
		z0 = svrhadd_x (p0, z1, 11))