/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
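
/* Tests of svcmla_lane_s16 (CMLA, indexed form): complex integer
   multiply-accumulate with a rotation immediate.  The groups below cover
   all four rotations, every valid lane index, each tied/untied operand
   combination, and the register-range handling of the indexed operand.  */

#ifdef USAGE_SKETCH
/* A minimal usage sketch, not part of the test proper (USAGE_SKETCH is a
   hypothetical guard, so the harness never builds this).  Elements are
   treated as (real, imaginary) pairs; the lane index selects one such
   pair from each 128-bit quadword of the third operand.  */
static inline svint16_t
cmla_lane_sketch (svint16_t acc, svint16_t a, svint16_t b)
{
  /* acc += partial product of a with complex pair 1 of b, rotated 90.  */
  return svcmla_lane (acc, a, b, 1, 90);
}
#endif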

/*
** cmla_lane_0_0_s16_tied1:
**	cmla	z0\.h, z1\.h, z2\.h\[0\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_0_s16_tied1, svint16_t,
		z0 = svcmla_lane_s16 (z0, z1, z2, 0, 0),
		z0 = svcmla_lane (z0, z1, z2, 0, 0))

/*
** cmla_lane_0_0_s16_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.h, \1\.h, z2\.h\[0\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_0_s16_tied2, svint16_t,
		z0 = svcmla_lane_s16 (z1, z0, z2, 0, 0),
		z0 = svcmla_lane (z1, z0, z2, 0, 0))

/*
** cmla_lane_0_0_s16_tied3:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.h, z2\.h, \1\.h\[0\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_0_s16_tied3, svint16_t,
		z0 = svcmla_lane_s16 (z1, z2, z0, 0, 0),
		z0 = svcmla_lane (z1, z2, z0, 0, 0))

/*
** cmla_lane_0_0_s16_untied:
**	movprfx	z0, z1
**	cmla	z0\.h, z2\.h, z3\.h\[0\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_0_s16_untied, svint16_t,
		z0 = svcmla_lane_s16 (z1, z2, z3, 0, 0),
		z0 = svcmla_lane (z1, z2, z3, 0, 0))
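
/* In the _tied1 test the accumulator is already in the destination (z0),
   and CMLA overwrites only its accumulator operand, so no moves are
   needed.  In _tied2 and _tied3 the destination overlaps a multiplicand
   instead, so z0 is first copied aside and MOVPRFX then moves the
   accumulator (z1) into z0: MOVPRFX may only introduce the accumulator
   into the destination, never another source operand.  The _untied test
   needs just the MOVPRFX, since none of its sources live in z0.  */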

/*
** cmla_lane_0_90_s16_tied1:
**	cmla	z0\.h, z1\.h, z2\.h\[0\], #90
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_90_s16_tied1, svint16_t,
		z0 = svcmla_lane_s16 (z0, z1, z2, 0, 90),
		z0 = svcmla_lane (z0, z1, z2, 0, 90))

/*
** cmla_lane_0_90_s16_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.h, \1\.h, z2\.h\[0\], #90
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_90_s16_tied2, svint16_t,
		z0 = svcmla_lane_s16 (z1, z0, z2, 0, 90),
		z0 = svcmla_lane (z1, z0, z2, 0, 90))

/*
** cmla_lane_0_90_s16_tied3:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.h, z2\.h, \1\.h\[0\], #90
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_90_s16_tied3, svint16_t,
		z0 = svcmla_lane_s16 (z1, z2, z0, 0, 90),
		z0 = svcmla_lane (z1, z2, z0, 0, 90))

/*
** cmla_lane_0_90_s16_untied:
**	movprfx	z0, z1
**	cmla	z0\.h, z2\.h, z3\.h\[0\], #90
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_90_s16_untied, svint16_t,
		z0 = svcmla_lane_s16 (z1, z2, z3, 0, 90),
		z0 = svcmla_lane (z1, z2, z3, 0, 90))

/*
** cmla_lane_0_180_s16_tied1:
**	cmla	z0\.h, z1\.h, z2\.h\[0\], #180
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_180_s16_tied1, svint16_t,
		z0 = svcmla_lane_s16 (z0, z1, z2, 0, 180),
		z0 = svcmla_lane (z0, z1, z2, 0, 180))

/*
** cmla_lane_0_180_s16_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.h, \1\.h, z2\.h\[0\], #180
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_180_s16_tied2, svint16_t,
		z0 = svcmla_lane_s16 (z1, z0, z2, 0, 180),
		z0 = svcmla_lane (z1, z0, z2, 0, 180))

/*
** cmla_lane_0_180_s16_tied3:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.h, z2\.h, \1\.h\[0\], #180
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_180_s16_tied3, svint16_t,
		z0 = svcmla_lane_s16 (z1, z2, z0, 0, 180),
		z0 = svcmla_lane (z1, z2, z0, 0, 180))

/*
** cmla_lane_0_180_s16_untied:
**	movprfx	z0, z1
**	cmla	z0\.h, z2\.h, z3\.h\[0\], #180
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_180_s16_untied, svint16_t,
		z0 = svcmla_lane_s16 (z1, z2, z3, 0, 180),
		z0 = svcmla_lane (z1, z2, z3, 0, 180))

/*
** cmla_lane_0_270_s16_tied1:
**	cmla	z0\.h, z1\.h, z2\.h\[0\], #270
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_270_s16_tied1, svint16_t,
		z0 = svcmla_lane_s16 (z0, z1, z2, 0, 270),
		z0 = svcmla_lane (z0, z1, z2, 0, 270))

/*
** cmla_lane_0_270_s16_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.h, \1\.h, z2\.h\[0\], #270
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_270_s16_tied2, svint16_t,
		z0 = svcmla_lane_s16 (z1, z0, z2, 0, 270),
		z0 = svcmla_lane (z1, z0, z2, 0, 270))

/*
** cmla_lane_0_270_s16_tied3:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	cmla	z0\.h, z2\.h, \1\.h\[0\], #270
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_270_s16_tied3, svint16_t,
		z0 = svcmla_lane_s16 (z1, z2, z0, 0, 270),
		z0 = svcmla_lane (z1, z2, z0, 0, 270))

/*
** cmla_lane_0_270_s16_untied:
**	movprfx	z0, z1
**	cmla	z0\.h, z2\.h, z3\.h\[0\], #270
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_0_270_s16_untied, svint16_t,
		z0 = svcmla_lane_s16 (z1, z2, z3, 0, 270),
		z0 = svcmla_lane (z1, z2, z3, 0, 270))
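
/* The rotation argument is given in degrees (0, 90, 180 or 270) and, as
   the four groups above show, passes straight through to the
   instruction's #rot immediate.  */

#ifdef USAGE_SKETCH
/* Sketch under the same hypothetical guard as above: a full complex
   multiply-accumulate, acc += a * b[lane], is conventionally built from
   the #0/#90 rotation pair.  */
static inline svint16_t
cmla_lane_full_mla_sketch (svint16_t acc, svint16_t a, svint16_t b)
{
  acc = svcmla_lane (acc, a, b, 0, 0);	 /* terms involving Re(b).  */
  return svcmla_lane (acc, a, b, 0, 90); /* terms involving Im(b).  */
}
#endif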

/*
** cmla_lane_1_s16:
**	cmla	z0\.h, z1\.h, z2\.h\[1\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_1_s16, svint16_t,
		z0 = svcmla_lane_s16 (z0, z1, z2, 1, 0),
		z0 = svcmla_lane (z0, z1, z2, 1, 0))

/*
** cmla_lane_2_s16:
**	cmla	z0\.h, z1\.h, z2\.h\[2\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_2_s16, svint16_t,
		z0 = svcmla_lane_s16 (z0, z1, z2, 2, 0),
		z0 = svcmla_lane (z0, z1, z2, 2, 0))

/*
** cmla_lane_3_s16:
**	cmla	z0\.h, z1\.h, z2\.h\[3\], #0
**	ret
*/
TEST_UNIFORM_Z (cmla_lane_3_s16, svint16_t,
		z0 = svcmla_lane_s16 (z0, z1, z2, 3, 0),
		z0 = svcmla_lane (z0, z1, z2, 3, 0))

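/* Two register-allocation edge cases follow.  For .h elements the indexed
   operand of CMLA is encoded in a 3-bit field, restricting it to z0-z7
   (and the lane index to 0-3: one complex pair per 128-bit quadword, as
   the lane_1/_2/_3 tests above cover).  z8 and z16 must therefore be
   copied into a low register first.  The z8 test also expects d8 to be
   saved and restored, because the low 64 bits of v8-v15 are callee-saved
   under the AAPCS64; z16 needs no such spill, v16-v31 being
   caller-saved.  */
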
/*
** cmla_lane_z8_s16:
**	str	d8, \[sp, -16\]!
**	mov	(z[0-7])\.d, z8\.d
**	cmla	z0\.h, z1\.h, \1\.h\[1\], #0
**	ldr	d8, \[sp\], 16
**	ret
*/
TEST_DUAL_LANE_REG (cmla_lane_z8_s16, svint16_t, svint16_t, z8,
		    z0 = svcmla_lane_s16 (z0, z1, z8, 1, 0),
		    z0 = svcmla_lane (z0, z1, z8, 1, 0))

/*
** cmla_lane_z16_s16:
**	mov	(z[0-7])\.d, z16\.d
**	cmla	z0\.h, z1\.h, \1\.h\[1\], #0
**	ret
*/
TEST_DUAL_LANE_REG (cmla_lane_z16_s16, svint16_t, svint16_t, z16,
		    z0 = svcmla_lane_s16 (z0, z1, z16, 1, 0),
		    z0 = svcmla_lane (z0, z1, z16, 1, 0))