gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/qadd_u64.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
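/* Each TEST_UNIFORM_Z / TEST_UNIFORM_ZX invocation below expands, via the
   macros in test_sve_acle.h, into a function named after its first
   argument; the two code arguments exercise the explicitly suffixed
   (svqadd_u64, svqadd_n_u64) and the overloaded (svqadd) form of the
   intrinsic.  check-function-bodies then matches each function's
   generated assembly against the "**" regex template in the comment
   that precedes it.  */
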
/*
** qadd_u64_tied1:
**	uqadd	z0\.d, (z0\.d, z1\.d|z1\.d, z0\.d)
**	ret
*/
TEST_UNIFORM_Z (qadd_u64_tied1, svuint64_t,
		z0 = svqadd_u64 (z0, z1),
		z0 = svqadd (z0, z1))

/*
** qadd_u64_tied2:
**	uqadd	z0\.d, (z0\.d, z1\.d|z1\.d, z0\.d)
**	ret
*/
TEST_UNIFORM_Z (qadd_u64_tied2, svuint64_t,
		z0 = svqadd_u64 (z1, z0),
		z0 = svqadd (z1, z0))

/*
** qadd_u64_untied:
**	uqadd	z0\.d, (z1\.d, z2\.d|z2\.d, z1\.d)
**	ret
*/
TEST_UNIFORM_Z (qadd_u64_untied, svuint64_t,
		z0 = svqadd_u64 (z1, z2),
		z0 = svqadd (z1, z2))

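/* Vector-by-scalar forms: UQADD has no GPR operand, so the scalar is
   expected to be broadcast into a vector register with MOV before the
   vector UQADD.  */
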
/*
** qadd_x0_u64_tied1:
**	mov	(z[0-9]+\.d), x0
**	uqadd	z0\.d, (z0\.d, \1|\1, z0\.d)
**	ret
*/
TEST_UNIFORM_ZX (qadd_x0_u64_tied1, svuint64_t, uint64_t,
		 z0 = svqadd_n_u64 (z0, x0),
		 z0 = svqadd (z0, x0))

/*
** qadd_x0_u64_untied:
**	mov	(z[0-9]+\.d), x0
**	uqadd	z0\.d, (z1\.d, \1|\1, z1\.d)
**	ret
*/
TEST_UNIFORM_ZX (qadd_x0_u64_untied, svuint64_t, uint64_t,
		 z0 = svqadd_n_u64 (z1, x0),
		 z0 = svqadd (z1, x0))

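/* Immediate forms: UQADD (immediate) encodes an unsigned 8-bit immediate
   (optionally shifted left by 8), so 1, 127, 128 and 255 all fold
   directly into the instruction.  The immediate form is destructive,
   so the untied case needs a MOVPRFX to copy z1 into z0 first.  */
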
/*
** qadd_1_u64_tied1:
**	uqadd	z0\.d, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_u64_tied1, svuint64_t,
		z0 = svqadd_n_u64 (z0, 1),
		z0 = svqadd (z0, 1))

/*
** qadd_1_u64_untied:
**	movprfx	z0, z1
**	uqadd	z0\.d, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (qadd_1_u64_untied, svuint64_t,
		z0 = svqadd_n_u64 (z1, 1),
		z0 = svqadd (z1, 1))

/*
** qadd_127_u64:
**	uqadd	z0\.d, z0\.d, #127
**	ret
*/
TEST_UNIFORM_Z (qadd_127_u64, svuint64_t,
		z0 = svqadd_n_u64 (z0, 127),
		z0 = svqadd (z0, 127))

/*
** qadd_128_u64:
**	uqadd	z0\.d, z0\.d, #128
**	ret
*/
TEST_UNIFORM_Z (qadd_128_u64, svuint64_t,
		z0 = svqadd_n_u64 (z0, 128),
		z0 = svqadd (z0, 128))

/*
** qadd_255_u64:
**	uqadd	z0\.d, z0\.d, #255
**	ret
*/
TEST_UNIFORM_Z (qadd_255_u64, svuint64_t,
		z0 = svqadd_n_u64 (z0, 255),
		z0 = svqadd (z0, 255))

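/* "Negative" immediates: interpreted as uint64_t these wrap to values
   well outside the 8-bit immediate range, so they must be materialized
   in a register first.  -1 is all-ones and can be built as a byte splat
   (hence the .b MOV); -127 and -128 use a .d MOV.  */
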
/*
** qadd_m1_u64:
**	mov	(z[0-9]+)\.b, #-1
**	uqadd	z0\.d, (z0\.d, \1\.d|\1\.d, z0\.d)
**	ret
*/
TEST_UNIFORM_Z (qadd_m1_u64, svuint64_t,
		z0 = svqadd_n_u64 (z0, -1),
		z0 = svqadd (z0, -1))

/*
** qadd_m127_u64:
**	mov	(z[0-9]+\.d), #-127
**	uqadd	z0\.d, (z0\.d, \1|\1, z0\.d)
**	ret
*/
TEST_UNIFORM_Z (qadd_m127_u64, svuint64_t,
		z0 = svqadd_n_u64 (z0, -127),
		z0 = svqadd (z0, -127))

/*
** qadd_m128_u64:
**	mov	(z[0-9]+\.d), #-128
**	uqadd	z0\.d, (z0\.d, \1|\1, z0\.d)
**	ret
*/
TEST_UNIFORM_Z (qadd_m128_u64, svuint64_t,
		z0 = svqadd_n_u64 (z0, -128),
		z0 = svqadd (z0, -128))