/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
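/* Each test below checks the code generated for svqrdcmlah_s64 (and the
   overloaded svqrdcmlah form) with rotations 0, 90, 180 and 270.  The
   _tied1 tests tie the result register to the accumulator and expect a
   single SQRDCMLAH.  The _tied2 and _tied3 tests tie the result to one
   of the multiplicands, so that operand is first saved in a temporary
   register and the accumulator is copied into the result register with
   MOVPRFX.  The _untied tests expect just a MOVPRFX before the
   SQRDCMLAH.  */
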
/*
** qrdcmlah_0_s64_tied1:
**	sqrdcmlah	z0\.d, z1\.d, z2\.d, #0
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_0_s64_tied1, svint64_t,
		z0 = svqrdcmlah_s64 (z0, z1, z2, 0),
		z0 = svqrdcmlah (z0, z1, z2, 0))

/*
** qrdcmlah_0_s64_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, \1, z2\.d, #0
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_0_s64_tied2, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z0, z2, 0),
		z0 = svqrdcmlah (z1, z0, z2, 0))

/*
** qrdcmlah_0_s64_tied3:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, z2\.d, \1, #0
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_0_s64_tied3, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z2, z0, 0),
		z0 = svqrdcmlah (z1, z2, z0, 0))

/*
** qrdcmlah_0_s64_untied:
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, z2\.d, z3\.d, #0
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_0_s64_untied, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z2, z3, 0),
		z0 = svqrdcmlah (z1, z2, z3, 0))

/*
** qrdcmlah_90_s64_tied1:
**	sqrdcmlah	z0\.d, z1\.d, z2\.d, #90
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_90_s64_tied1, svint64_t,
		z0 = svqrdcmlah_s64 (z0, z1, z2, 90),
		z0 = svqrdcmlah (z0, z1, z2, 90))

/*
** qrdcmlah_90_s64_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, \1, z2\.d, #90
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_90_s64_tied2, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z0, z2, 90),
		z0 = svqrdcmlah (z1, z0, z2, 90))

/*
** qrdcmlah_90_s64_tied3:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, z2\.d, \1, #90
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_90_s64_tied3, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z2, z0, 90),
		z0 = svqrdcmlah (z1, z2, z0, 90))

/*
** qrdcmlah_90_s64_untied:
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, z2\.d, z3\.d, #90
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_90_s64_untied, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z2, z3, 90),
		z0 = svqrdcmlah (z1, z2, z3, 90))

/*
** qrdcmlah_180_s64_tied1:
**	sqrdcmlah	z0\.d, z1\.d, z2\.d, #180
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_180_s64_tied1, svint64_t,
		z0 = svqrdcmlah_s64 (z0, z1, z2, 180),
		z0 = svqrdcmlah (z0, z1, z2, 180))

/*
** qrdcmlah_180_s64_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, \1, z2\.d, #180
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_180_s64_tied2, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z0, z2, 180),
		z0 = svqrdcmlah (z1, z0, z2, 180))

/*
** qrdcmlah_180_s64_tied3:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, z2\.d, \1, #180
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_180_s64_tied3, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z2, z0, 180),
		z0 = svqrdcmlah (z1, z2, z0, 180))

/*
** qrdcmlah_180_s64_untied:
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, z2\.d, z3\.d, #180
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_180_s64_untied, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z2, z3, 180),
		z0 = svqrdcmlah (z1, z2, z3, 180))

/*
** qrdcmlah_270_s64_tied1:
**	sqrdcmlah	z0\.d, z1\.d, z2\.d, #270
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_270_s64_tied1, svint64_t,
		z0 = svqrdcmlah_s64 (z0, z1, z2, 270),
		z0 = svqrdcmlah (z0, z1, z2, 270))

/*
** qrdcmlah_270_s64_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, \1, z2\.d, #270
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_270_s64_tied2, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z0, z2, 270),
		z0 = svqrdcmlah (z1, z0, z2, 270))

/*
** qrdcmlah_270_s64_tied3:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, z2\.d, \1, #270
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_270_s64_tied3, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z2, z0, 270),
		z0 = svqrdcmlah (z1, z2, z0, 270))

/*
** qrdcmlah_270_s64_untied:
**	movprfx	z0, z1
**	sqrdcmlah	z0\.d, z2\.d, z3\.d, #270
**	ret
*/
TEST_UNIFORM_Z (qrdcmlah_270_s64_untied, svint64_t,
		z0 = svqrdcmlah_s64 (z1, z2, z3, 270),
		z0 = svqrdcmlah (z1, z2, z3, 270))