gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/set4_s64.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

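/* Each test builds an svint64x4_t result by replacing one element of the
   four-vector tuple held in z4-z7 with the single vector in z0, once via
   the type-specific svset4_s64 form and once via the generic svset4
   overload.  The "**" pattern above each TEST_SET is the expected
   assembly: when the result is allocated to z24-z27, the unchanged tuple
   elements must be copied across, whereas reusing z4-z7 for the result
   needs only a single mov of the replaced element.  */
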
/*
** set4_s64_z24_0:
**	mov	z25\.d, z5\.d
**	mov	z26\.d, z6\.d
**	mov	z27\.d, z7\.d
**	mov	z24\.d, z0\.d
**	ret
*/
TEST_SET (set4_s64_z24_0, svint64x4_t, svint64_t,
	  z24 = svset4_s64 (z4, 0, z0),
	  z24 = svset4 (z4, 0, z0))

/*
** set4_s64_z24_1:
**	mov	z24\.d, z4\.d
**	mov	z26\.d, z6\.d
**	mov	z27\.d, z7\.d
**	mov	z25\.d, z0\.d
**	ret
*/
TEST_SET (set4_s64_z24_1, svint64x4_t, svint64_t,
	  z24 = svset4_s64 (z4, 1, z0),
	  z24 = svset4 (z4, 1, z0))

/*
** set4_s64_z24_2:
**	mov	z24\.d, z4\.d
**	mov	z25\.d, z5\.d
**	mov	z27\.d, z7\.d
**	mov	z26\.d, z0\.d
**	ret
*/
TEST_SET (set4_s64_z24_2, svint64x4_t, svint64_t,
	  z24 = svset4_s64 (z4, 2, z0),
	  z24 = svset4 (z4, 2, z0))

/*
** set4_s64_z24_3:
**	mov	z24\.d, z4\.d
**	mov	z25\.d, z5\.d
**	mov	z26\.d, z6\.d
**	mov	z27\.d, z0\.d
**	ret
*/
TEST_SET (set4_s64_z24_3, svint64x4_t, svint64_t,
	  z24 = svset4_s64 (z4, 3, z0),
	  z24 = svset4 (z4, 3, z0))

/*
** set4_s64_z4_0:
**	mov	z4\.d, z0\.d
**	ret
*/
TEST_SET (set4_s64_z4_0, svint64x4_t, svint64_t,
	  z4 = svset4_s64 (z4, 0, z0),
	  z4 = svset4 (z4, 0, z0))

/*
** set4_s64_z4_1:
**	mov	z5\.d, z0\.d
**	ret
*/
TEST_SET (set4_s64_z4_1, svint64x4_t, svint64_t,
	  z4 = svset4_s64 (z4, 1, z0),
	  z4 = svset4 (z4, 1, z0))

/*
** set4_s64_z4_2:
**	mov	z6\.d, z0\.d
**	ret
*/
TEST_SET (set4_s64_z4_2, svint64x4_t, svint64_t,
	  z4 = svset4_s64 (z4, 2, z0),
	  z4 = svset4 (z4, 2, z0))

/*
** set4_s64_z4_3:
**	mov	z7\.d, z0\.d
**	ret
*/
TEST_SET (set4_s64_z4_3, svint64x4_t, svint64_t,
	  z4 = svset4_s64 (z4, 3, z0),
	  z4 = svset4 (z4, 3, z0))