1  /* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
       2  
       3  #include "test_sve_acle.h"
       4  
/* "tied1": the accumulator and the result are both z0, so SQDMLSLB can
   update z0 in place -- the expected body below has no MOV/MOVPRFX.  */
       5  /*
       6  ** qdmlslb_s64_tied1:
       7  **	sqdmlslb	z0\.d, z4\.s, z5\.s
       8  **	ret
       9  */
      10  TEST_DUAL_Z (qdmlslb_s64_tied1, svint64_t, svint32_t,
      11  	     z0 = svqdmlslb_s64 (z0, z4, z5),
      12  	     z0 = svqdmlslb (z0, z4, z5))
      13  
/* "tied2": the result register z0 is the second (multiplicand) operand,
   not the accumulator.  The expected body copies z0 to a scratch register
   first, then MOVPRFXes the accumulator (z4) into z0 before the multiply-
   subtract.  \1 back-references the scratch register captured by the MOV.  */
      14  /*
      15  ** qdmlslb_s64_tied2:
      16  **	mov	(z[0-9]+)\.d, z0\.d
      17  **	movprfx	z0, z4
      18  **	sqdmlslb	z0\.d, \1\.s, z1\.s
      19  **	ret
      20  */
      21  TEST_DUAL_Z_REV (qdmlslb_s64_tied2, svint64_t, svint32_t,
      22  		 z0_res = svqdmlslb_s64 (z4, z0, z1),
      23  		 z0_res = svqdmlslb (z4, z0, z1))
      24  
/* "tied3": as tied2, but z0 is the third (multiplier) operand.  Same
   expected sequence: save z0 to a scratch register, MOVPRFX z4 into z0,
   then use the scratch register (\1) as the multiplier.  */
      25  /*
      26  ** qdmlslb_s64_tied3:
      27  **	mov	(z[0-9]+)\.d, z0\.d
      28  **	movprfx	z0, z4
      29  **	sqdmlslb	z0\.d, z1\.s, \1\.s
      30  **	ret
      31  */
      32  TEST_DUAL_Z_REV (qdmlslb_s64_tied3, svint64_t, svint32_t,
      33  		 z0_res = svqdmlslb_s64 (z4, z1, z0),
      34  		 z0_res = svqdmlslb (z4, z1, z0))
      35  
/* "untied": the result register z0 is distinct from all three inputs
   (z1, z4, z5), so a single MOVPRFX of the accumulator z1 into z0 is
   all that is needed before the in-place SQDMLSLB.  */
      36  /*
      37  ** qdmlslb_s64_untied:
      38  **	movprfx	z0, z1
      39  **	sqdmlslb	z0\.d, z4\.s, z5\.s
      40  **	ret
      41  */
      42  TEST_DUAL_Z (qdmlslb_s64_untied, svint64_t, svint32_t,
      43  	     z0 = svqdmlslb_s64 (z1, z4, z5),
      44  	     z0 = svqdmlslb (z1, z4, z5))
      45  
/* _n form with a scalar (int32_t in w0) third operand: the scalar must
   first be broadcast into a vector register (mov (z[0-9]+)\.s, w0);
   the accumulator is already in z0, so no MOVPRFX is expected.  */
      46  /*
      47  ** qdmlslb_w0_s64_tied1:
      48  **	mov	(z[0-9]+\.s), w0
      49  **	sqdmlslb	z0\.d, z4\.s, \1
      50  **	ret
      51  */
      52  TEST_DUAL_ZX (qdmlslb_w0_s64_tied1, svint64_t, svint32_t, int32_t,
      53  	      z0 = svqdmlslb_n_s64 (z0, z4, x0),
      54  	      z0 = svqdmlslb (z0, z4, x0))
      55  
/* Scalar (w0) operand, untied accumulator: expect the scalar broadcast
   plus a MOVPRFX moving the accumulator z1 into the result register z0.  */
      56  /*
      57  ** qdmlslb_w0_s64_untied:
      58  **	mov	(z[0-9]+\.s), w0
      59  **	movprfx	z0, z1
      60  **	sqdmlslb	z0\.d, z4\.s, \1
      61  **	ret
      62  */
      63  TEST_DUAL_ZX (qdmlslb_w0_s64_untied, svint64_t, svint32_t, int32_t,
      64  	      z0 = svqdmlslb_n_s64 (z1, z4, x0),
      65  	      z0 = svqdmlslb (z1, z4, x0))
      66  
/* _n form with an immediate (11) third operand: SQDMLSLB has no
   immediate form, so the constant is materialized into a vector
   register (mov ..., #11); the accumulator is already in z0.  */
      67  /*
      68  ** qdmlslb_11_s64_tied1:
      69  **	mov	(z[0-9]+\.s), #11
      70  **	sqdmlslb	z0\.d, z4\.s, \1
      71  **	ret
      72  */
      73  TEST_DUAL_Z (qdmlslb_11_s64_tied1, svint64_t, svint32_t,
      74  	     z0 = svqdmlslb_n_s64 (z0, z4, 11),
      75  	     z0 = svqdmlslb (z0, z4, 11))
      76  
/* Immediate (11) operand, untied accumulator: expect the constant
   materialized into a vector register plus a MOVPRFX moving the
   accumulator z1 into the result register z0.  */
      77  /*
      78  ** qdmlslb_11_s64_untied:
      79  **	mov	(z[0-9]+\.s), #11
      80  **	movprfx	z0, z1
      81  **	sqdmlslb	z0\.d, z4\.s, \1
      82  **	ret
      83  */
      84  TEST_DUAL_Z (qdmlslb_11_s64_untied, svint64_t, svint32_t,
      85  	     z0 = svqdmlslb_n_s64 (z1, z4, 11),
      86  	     z0 = svqdmlslb (z1, z4, 11))