/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
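
/* Tests for the float64 svcadd intrinsics, which map to FCADD: a complex
   addition in which the second operand is rotated by 90 or 270 degrees.
   For each (even, odd) = (real, imaginary) element pair, #90 computes
   re1 - im2 and im1 + re2, while #270 computes re1 + im2 and im1 - re2.
   The merging (_m) forms below cover tied and untied register
   allocations.  */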

/*
** cadd_90_f64_m_tied1:
**	fcadd	z0\.d, p0/m, z0\.d, z1\.d, #90
**	ret
*/
TEST_UNIFORM_Z (cadd_90_f64_m_tied1, svfloat64_t,
		z0 = svcadd_f64_m (p0, z0, z1, 90),
		z0 = svcadd_m (p0, z0, z1, 90))

/*
** cadd_90_f64_m_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	fcadd	z0\.d, p0/m, z0\.d, \1, #90
**	ret
*/
TEST_UNIFORM_Z (cadd_90_f64_m_tied2, svfloat64_t,
		z0 = svcadd_f64_m (p0, z1, z0, 90),
		z0 = svcadd_m (p0, z1, z0, 90))

/*
** cadd_90_f64_m_untied:
**	movprfx	z0, z1
**	fcadd	z0\.d, p0/m, z0\.d, z2\.d, #90
**	ret
*/
TEST_UNIFORM_Z (cadd_90_f64_m_untied, svfloat64_t,
		z0 = svcadd_f64_m (p0, z1, z2, 90),
		z0 = svcadd_m (p0, z1, z2, 90))

/*
** cadd_270_f64_m_tied1:
**	fcadd	z0\.d, p0/m, z0\.d, z1\.d, #270
**	ret
*/
TEST_UNIFORM_Z (cadd_270_f64_m_tied1, svfloat64_t,
		z0 = svcadd_f64_m (p0, z0, z1, 270),
		z0 = svcadd_m (p0, z0, z1, 270))

/*
** cadd_270_f64_m_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	fcadd	z0\.d, p0/m, z0\.d, \1, #270
**	ret
*/
TEST_UNIFORM_Z (cadd_270_f64_m_tied2, svfloat64_t,
		z0 = svcadd_f64_m (p0, z1, z0, 270),
		z0 = svcadd_m (p0, z1, z0, 270))

/*
** cadd_270_f64_m_untied:
**	movprfx	z0, z1
**	fcadd	z0\.d, p0/m, z0\.d, z2\.d, #270
**	ret
*/
TEST_UNIFORM_Z (cadd_270_f64_m_untied, svfloat64_t,
		z0 = svcadd_f64_m (p0, z1, z2, 270),
		z0 = svcadd_m (p0, z1, z2, 270))

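/* Zeroing (_z) forms: inactive elements of the result are set to zero, so
   each test expects a zeroing MOVPRFX before the predicated FCADD.  */
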
/*
** cadd_90_f64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	fcadd	z0\.d, p0/m, z0\.d, z1\.d, #90
**	ret
*/
TEST_UNIFORM_Z (cadd_90_f64_z_tied1, svfloat64_t,
		z0 = svcadd_f64_z (p0, z0, z1, 90),
		z0 = svcadd_z (p0, z0, z1, 90))

/*
** cadd_90_f64_z_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0\.d, p0/z, z1\.d
**	fcadd	z0\.d, p0/m, z0\.d, \1, #90
**	ret
*/
TEST_UNIFORM_Z (cadd_90_f64_z_tied2, svfloat64_t,
		z0 = svcadd_f64_z (p0, z1, z0, 90),
		z0 = svcadd_z (p0, z1, z0, 90))

/*
** cadd_90_f64_z_untied:
**	movprfx	z0\.d, p0/z, z1\.d
**	fcadd	z0\.d, p0/m, z0\.d, z2\.d, #90
**	ret
*/
TEST_UNIFORM_Z (cadd_90_f64_z_untied, svfloat64_t,
		z0 = svcadd_f64_z (p0, z1, z2, 90),
		z0 = svcadd_z (p0, z1, z2, 90))

/*
** cadd_270_f64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	fcadd	z0\.d, p0/m, z0\.d, z1\.d, #270
**	ret
*/
TEST_UNIFORM_Z (cadd_270_f64_z_tied1, svfloat64_t,
		z0 = svcadd_f64_z (p0, z0, z1, 270),
		z0 = svcadd_z (p0, z0, z1, 270))

/*
** cadd_270_f64_z_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0\.d, p0/z, z1\.d
**	fcadd	z0\.d, p0/m, z0\.d, \1, #270
**	ret
*/
TEST_UNIFORM_Z (cadd_270_f64_z_tied2, svfloat64_t,
		z0 = svcadd_f64_z (p0, z1, z0, 270),
		z0 = svcadd_z (p0, z1, z0, 270))

/*
** cadd_270_f64_z_untied:
**	movprfx	z0\.d, p0/z, z1\.d
**	fcadd	z0\.d, p0/m, z0\.d, z2\.d, #270
**	ret
*/
TEST_UNIFORM_Z (cadd_270_f64_z_untied, svfloat64_t,
		z0 = svcadd_f64_z (p0, z1, z2, 270),
		z0 = svcadd_z (p0, z1, z2, 270))

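/* "Don't care" (_x) forms: inactive elements may take any value, so the
   expected code matches the merging (_m) forms above.  */
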
/*
** cadd_90_f64_x_tied1:
**	fcadd	z0\.d, p0/m, z0\.d, z1\.d, #90
**	ret
*/
TEST_UNIFORM_Z (cadd_90_f64_x_tied1, svfloat64_t,
		z0 = svcadd_f64_x (p0, z0, z1, 90),
		z0 = svcadd_x (p0, z0, z1, 90))

/*
** cadd_90_f64_x_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	fcadd	z0\.d, p0/m, z0\.d, \1, #90
**	ret
*/
TEST_UNIFORM_Z (cadd_90_f64_x_tied2, svfloat64_t,
		z0 = svcadd_f64_x (p0, z1, z0, 90),
		z0 = svcadd_x (p0, z1, z0, 90))

/*
** cadd_90_f64_x_untied:
**	movprfx	z0, z1
**	fcadd	z0\.d, p0/m, z0\.d, z2\.d, #90
**	ret
*/
TEST_UNIFORM_Z (cadd_90_f64_x_untied, svfloat64_t,
		z0 = svcadd_f64_x (p0, z1, z2, 90),
		z0 = svcadd_x (p0, z1, z2, 90))

/*
** cadd_270_f64_x_tied1:
**	fcadd	z0\.d, p0/m, z0\.d, z1\.d, #270
**	ret
*/
TEST_UNIFORM_Z (cadd_270_f64_x_tied1, svfloat64_t,
		z0 = svcadd_f64_x (p0, z0, z1, 270),
		z0 = svcadd_x (p0, z0, z1, 270))

/*
** cadd_270_f64_x_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	fcadd	z0\.d, p0/m, z0\.d, \1, #270
**	ret
*/
TEST_UNIFORM_Z (cadd_270_f64_x_tied2, svfloat64_t,
		z0 = svcadd_f64_x (p0, z1, z0, 270),
		z0 = svcadd_x (p0, z1, z0, 270))

/*
** cadd_270_f64_x_untied:
**	movprfx	z0, z1
**	fcadd	z0\.d, p0/m, z0\.d, z2\.d, #270
**	ret
*/
TEST_UNIFORM_Z (cadd_270_f64_x_untied, svfloat64_t,
		z0 = svcadd_f64_x (p0, z1, z2, 270),
		z0 = svcadd_x (p0, z1, z2, 270))

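/* _x forms called with an all-true predicate: the patterns only require a
   PTRUE to appear somewhere in the function and deliberately leave the
   surrounding instruction sequence unconstrained.  */
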
/*
** ptrue_cadd_90_f64_x_tied1:
**	...
**	ptrue	p[0-9]+\.b[^\n]*
**	...
**	ret
*/
TEST_UNIFORM_Z (ptrue_cadd_90_f64_x_tied1, svfloat64_t,
		z0 = svcadd_f64_x (svptrue_b64 (), z0, z1, 90),
		z0 = svcadd_x (svptrue_b64 (), z0, z1, 90))

/*
** ptrue_cadd_90_f64_x_tied2:
**	...
**	ptrue	p[0-9]+\.b[^\n]*
**	...
**	ret
*/
TEST_UNIFORM_Z (ptrue_cadd_90_f64_x_tied2, svfloat64_t,
		z0 = svcadd_f64_x (svptrue_b64 (), z1, z0, 90),
		z0 = svcadd_x (svptrue_b64 (), z1, z0, 90))

/*
** ptrue_cadd_90_f64_x_untied:
**	...
**	ptrue	p[0-9]+\.b[^\n]*
**	...
**	ret
*/
TEST_UNIFORM_Z (ptrue_cadd_90_f64_x_untied, svfloat64_t,
		z0 = svcadd_f64_x (svptrue_b64 (), z1, z2, 90),
		z0 = svcadd_x (svptrue_b64 (), z1, z2, 90))

/*
** ptrue_cadd_270_f64_x_tied1:
**	...
**	ptrue	p[0-9]+\.b[^\n]*
**	...
**	ret
*/
TEST_UNIFORM_Z (ptrue_cadd_270_f64_x_tied1, svfloat64_t,
		z0 = svcadd_f64_x (svptrue_b64 (), z0, z1, 270),
		z0 = svcadd_x (svptrue_b64 (), z0, z1, 270))

/*
** ptrue_cadd_270_f64_x_tied2:
**	...
**	ptrue	p[0-9]+\.b[^\n]*
**	...
**	ret
*/
TEST_UNIFORM_Z (ptrue_cadd_270_f64_x_tied2, svfloat64_t,
		z0 = svcadd_f64_x (svptrue_b64 (), z1, z0, 270),
		z0 = svcadd_x (svptrue_b64 (), z1, z0, 270))

/*
** ptrue_cadd_270_f64_x_untied:
**	...
**	ptrue	p[0-9]+\.b[^\n]*
**	...
**	ret
*/
TEST_UNIFORM_Z (ptrue_cadd_270_f64_x_untied, svfloat64_t,
		z0 = svcadd_f64_x (svptrue_b64 (), z1, z2, 270),
		z0 = svcadd_x (svptrue_b64 (), z1, z2, 270))