gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/usmmla_s32.c
/* { dg-require-effective-target aarch64_asm_i8mm_ok } */
/* { dg-additional-options "-march=armv8.2-a+sve+i8mm" } */
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
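/* Each TEST_TRIPLE_Z* invocation below expands (via test_sve_acle.h) to a
   test function that calls the intrinsic either through its type-suffixed
   form or its overloaded form (both are listed).  The check-function-bodies
   directive compares each function's generated assembly against the regex
   pattern in the comment above it when the test is built with -DCHECK_ASM.
   The tied1/tied2/tied3/untied variants cover the register-allocation cases:
   when the accumulator is not already in z0 the compiler is expected to emit
   a MOVPRFX, and when a multiplicand input overlaps z0 it must first be
   copied to a scratch register (the mov matched by the pattern).  */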
/*
** usmmla_s32_tied1:
**	usmmla	z0\.s, z2\.b, z4\.b
**	ret
*/
TEST_TRIPLE_Z (usmmla_s32_tied1, svint32_t, svuint8_t, svint8_t,
	       z0 = svusmmla_s32 (z0, z2, z4),
	       z0 = svusmmla (z0, z2, z4))

/*
** usmmla_s32_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z2
**	usmmla	z0\.s, \1\.b, z4\.b
**	ret
*/
TEST_TRIPLE_Z_REV2 (usmmla_s32_tied2, svint32_t, svuint8_t, svint8_t,
		    z0_res = svusmmla_s32 (z2, z0, z4),
		    z0_res = svusmmla (z2, z0, z4))

/*
** usmmla_s32_tied3:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z4
**	usmmla	z0\.s, z2\.b, \1\.b
**	ret
*/
TEST_TRIPLE_Z_REV (usmmla_s32_tied3, svint32_t, svuint8_t, svint8_t,
		   z0_res = svusmmla_s32 (z4, z2, z0),
		   z0_res = svusmmla (z4, z2, z0))

/*
** usmmla_s32_untied:
**	movprfx	z0, z1
**	usmmla	z0\.s, z2\.b, z4\.b
**	ret
*/
TEST_TRIPLE_Z (usmmla_s32_untied, svint32_t, svuint8_t, svint8_t,
	       z0 = svusmmla_s32 (z1, z2, z4),
	       z0 = svusmmla (z1, z2, z4))
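
/* A minimal standalone sketch (not part of the upstream test) of calling the
   same intrinsic directly; it assumes arm_sve.h and compilation with
   -march=armv8.2-a+sve+i8mm.  Per 128-bit vector segment, USMMLA multiplies
   a 2x8 matrix of unsigned 8-bit elements by an 8x2 matrix of signed 8-bit
   elements and accumulates the 2x2 result into signed 32-bit lanes.  Guarded
   out with #if 0 so it cannot disturb the check-function-bodies matching
   above.  */
#if 0
#include <arm_sve.h>

static svint32_t
usmmla_example (svint32_t acc, svuint8_t u8, svint8_t s8)
{
  /* acc += (unsigned u8) x (signed s8), blockwise matrix multiply.  */
  return svusmmla_s32 (acc, u8, s8);
}
#endif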