gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/cmla_lane_u32.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

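/* Each "**" comment below is an assembly template that the
   check-function-bodies directive above matches against the code generated
   for the function of the same name.  TEST_UNIFORM_Z and TEST_DUAL_LANE_REG
   come from test_sve_acle.h; roughly speaking, each invocation defines one
   function whose body is either the type-suffixed call or the overloaded
   call, depending on how the harness is configured.  The _tied1, _tied2 and
   _tied3 suffixes say which input shares z0 with the result (the
   accumulator, the first multiplicand or the indexed multiplicand), while
   the _untied result goes to a register distinct from all inputs.  Where the
   destination would clobber an input that is still needed, the templates
   expect a mov of that input to a temporary plus a movprfx that initializes
   the accumulator.  The last two intrinsic arguments are the lane index and
   the rotation in degrees (0, 90, 180 or 270).  */
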
/*
** cmla_lane_0_0_u32_tied1:
**	cmla	z0\.s, z1\.s, z2\.s\[0\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_0_u32_tied1, svuint32_t,
		z0 = svcmla_lane_u32 (z0, z1, z2, 0, 0),
		z0 = svcmla_lane (z0, z1, z2, 0, 0))

/*
** cmla_lane_0_0_u32_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.s, \1\.s, z2\.s\[0\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_0_u32_tied2, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z0, z2, 0, 0),
		z0 = svcmla_lane (z1, z0, z2, 0, 0))

/*
** cmla_lane_0_0_u32_tied3:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.s, z2\.s, \1\.s\[0\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_0_u32_tied3, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z2, z0, 0, 0),
		z0 = svcmla_lane (z1, z2, z0, 0, 0))

/*
** cmla_lane_0_0_u32_untied:
**	movprfx	z0, z1
**	cmla	z0\.s, z2\.s, z3\.s\[0\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_0_u32_untied, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z2, z3, 0, 0),
		z0 = svcmla_lane (z1, z2, z3, 0, 0))

/*
** cmla_lane_0_90_u32_tied1:
**	cmla	z0\.s, z1\.s, z2\.s\[0\], #90
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_90_u32_tied1, svuint32_t,
		z0 = svcmla_lane_u32 (z0, z1, z2, 0, 90),
		z0 = svcmla_lane (z0, z1, z2, 0, 90))

/*
** cmla_lane_0_90_u32_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.s, \1\.s, z2\.s\[0\], #90
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_90_u32_tied2, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z0, z2, 0, 90),
		z0 = svcmla_lane (z1, z0, z2, 0, 90))

/*
** cmla_lane_0_90_u32_tied3:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.s, z2\.s, \1\.s\[0\], #90
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_90_u32_tied3, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z2, z0, 0, 90),
		z0 = svcmla_lane (z1, z2, z0, 0, 90))

/*
** cmla_lane_0_90_u32_untied:
**	movprfx	z0, z1
**	cmla	z0\.s, z2\.s, z3\.s\[0\], #90
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_90_u32_untied, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z2, z3, 0, 90),
		z0 = svcmla_lane (z1, z2, z3, 0, 90))

/*
** cmla_lane_0_180_u32_tied1:
**	cmla	z0\.s, z1\.s, z2\.s\[0\], #180
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_180_u32_tied1, svuint32_t,
		z0 = svcmla_lane_u32 (z0, z1, z2, 0, 180),
		z0 = svcmla_lane (z0, z1, z2, 0, 180))

/*
** cmla_lane_0_180_u32_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.s, \1\.s, z2\.s\[0\], #180
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_180_u32_tied2, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z0, z2, 0, 180),
		z0 = svcmla_lane (z1, z0, z2, 0, 180))

/*
** cmla_lane_0_180_u32_tied3:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.s, z2\.s, \1\.s\[0\], #180
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_180_u32_tied3, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z2, z0, 0, 180),
		z0 = svcmla_lane (z1, z2, z0, 0, 180))

/*
** cmla_lane_0_180_u32_untied:
**	movprfx	z0, z1
**	cmla	z0\.s, z2\.s, z3\.s\[0\], #180
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_180_u32_untied, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z2, z3, 0, 180),
		z0 = svcmla_lane (z1, z2, z3, 0, 180))

/*
** cmla_lane_0_270_u32_tied1:
**	cmla	z0\.s, z1\.s, z2\.s\[0\], #270
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_270_u32_tied1, svuint32_t,
		z0 = svcmla_lane_u32 (z0, z1, z2, 0, 270),
		z0 = svcmla_lane (z0, z1, z2, 0, 270))

/*
** cmla_lane_0_270_u32_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.s, \1\.s, z2\.s\[0\], #270
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_270_u32_tied2, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z0, z2, 0, 270),
		z0 = svcmla_lane (z1, z0, z2, 0, 270))

/*
** cmla_lane_0_270_u32_tied3:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.s, z2\.s, \1\.s\[0\], #270
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_270_u32_tied3, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z2, z0, 0, 270),
		z0 = svcmla_lane (z1, z2, z0, 0, 270))

/*
** cmla_lane_0_270_u32_untied:
**	movprfx	z0, z1
**	cmla	z0\.s, z2\.s, z3\.s\[0\], #270
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_270_u32_untied, svuint32_t,
		z0 = svcmla_lane_u32 (z1, z2, z3, 0, 270),
		z0 = svcmla_lane (z1, z2, z3, 0, 270))

/*
** cmla_lane_1_u32:
**	cmla	z0\.s, z1\.s, z2\.s\[1\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_1_u32, svuint32_t,
		z0 = svcmla_lane_u32 (z0, z1, z2, 1, 0),
		z0 = svcmla_lane (z0, z1, z2, 1, 0))

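/* The remaining tests use TEST_DUAL_LANE_REG to force the indexed operand
   into a specific register (z8, then z16).  The expected assembly copies
   that operand into one of z0-z7 before the cmla, presumably because this
   indexed form only accepts a low vector register for that operand; the z8
   variant also saves and restores d8 around the body, since the low 64 bits
   of v8-v15 are call-preserved under AAPCS64.  */
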
/*
** cmla_lane_z8_u32:
**	str	d8, \[sp, -16\]!
**	mov	(z[0-7])\.d, z8\.d
**	cmla	z0\.s, z1\.s, \1\.s\[1\], #0
**	ldr	d8, \[sp\], 16
**	ret
*/
TEST_DUAL_LANE_REG (cmla_lane_z8_u32, svuint32_t, svuint32_t, z8,
		    z0 = svcmla_lane_u32 (z0, z1, z8, 1, 0),
		    z0 = svcmla_lane (z0, z1, z8, 1, 0))

/*
** cmla_lane_z16_u32:
**	mov	(z[0-7])\.d, z16\.d
**	cmla	z0\.s, z1\.s, \1\.s\[1\], #0
**	ret
*/
TEST_DUAL_LANE_REG (cmla_lane_z16_u32, svuint32_t, svuint32_t, z16,
		    z0 = svcmla_lane_u32 (z0, z1, z16, 1, 0),
		    z0 = svcmla_lane (z0, z1, z16, 1, 0))
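
/* A minimal standalone sketch, not part of the upstream test harness, of
   calling the same intrinsic directly; the function name is illustrative.
   It only needs <arm_sve.h> (already pulled in via test_sve_acle.h, but the
   include guard makes repeating it harmless) and an SVE2-enabled target,
   for example -march=armv8-a+sve2.  The operands hold interleaved
   {real, imaginary} u32 pairs; lane 0 of each 128-bit segment of B is
   selected and the 90-degree rotation form of the complex
   multiply-accumulate is applied (see the Arm ACLE/ISA documentation for
   the exact element-wise semantics).  */
#include <arm_sve.h>

svuint32_t
cmla_rot90_lane0 (svuint32_t acc, svuint32_t a, svuint32_t b)
{
  /* Accumulate into ACC using lane index 0 and rotation 90.  */
  return svcmla_lane_u32 (acc, a, b, 0, 90);
}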