(root)/
gcc-13.2.0/
gcc/
testsuite/
gcc.target/
aarch64/
sve/
acle/
asm/
qsub_u32.c
       1  /* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
       2  
       3  #include "test_sve_acle.h"
          /* Tests for svqsub (saturating unsigned subtraction) on svuint32_t.
             Each "**" comment below is an exact assembly template for the
             function that the following TEST_UNIFORM_Z / TEST_UNIFORM_ZX
             macro expands to; the dg-final check-function-bodies directive
             above compares the compiler's output against these templates.
             Do not edit the "**" comment bodies or macro arguments — they
             are the test contract, not documentation.  */
       4  
          /* Vector/vector forms: UQSUB is emitted directly, regardless of
             which operand (if any) is tied to the destination register.  */
       5  /*
       6  ** qsub_u32_tied1:
       7  **	uqsub	z0\.s, z0\.s, z1\.s
       8  **	ret
       9  */
      10  TEST_UNIFORM_Z (qsub_u32_tied1, svuint32_t,
      11  		z0 = svqsub_u32 (z0, z1),
      12  		z0 = svqsub (z0, z1))
      13  
      14  /*
      15  ** qsub_u32_tied2:
      16  **	uqsub	z0\.s, z1\.s, z0\.s
      17  **	ret
      18  */
      19  TEST_UNIFORM_Z (qsub_u32_tied2, svuint32_t,
      20  		z0 = svqsub_u32 (z1, z0),
      21  		z0 = svqsub (z1, z0))
      22  
      23  /*
      24  ** qsub_u32_untied:
      25  **	uqsub	z0\.s, z1\.s, z2\.s
      26  **	ret
      27  */
      28  TEST_UNIFORM_Z (qsub_u32_untied, svuint32_t,
      29  		z0 = svqsub_u32 (z1, z2),
      30  		z0 = svqsub (z1, z2))
      31  
          /* Scalar (_n) forms with the operand in w0: the expected code
             broadcasts the scalar into a vector register with MOV and then
             uses the register form of UQSUB.  */
      32  /*
      33  ** qsub_w0_u32_tied1:
      34  **	mov	(z[0-9]+\.s), w0
      35  **	uqsub	z0\.s, z0\.s, \1
      36  **	ret
      37  */
      38  TEST_UNIFORM_ZX (qsub_w0_u32_tied1, svuint32_t, uint32_t,
      39  		 z0 = svqsub_n_u32 (z0, x0),
      40  		 z0 = svqsub (z0, x0))
      41  
      42  /*
      43  ** qsub_w0_u32_untied:
      44  **	mov	(z[0-9]+\.s), w0
      45  **	uqsub	z0\.s, z1\.s, \1
      46  **	ret
      47  */
      48  TEST_UNIFORM_ZX (qsub_w0_u32_untied, svuint32_t, uint32_t,
      49  		 z0 = svqsub_n_u32 (z1, x0),
      50  		 z0 = svqsub (z1, x0))
      51  
          /* Immediate forms: the constants used here (1, 127, 128, 255) are
             expected to be encoded directly in UQSUB's immediate field, so
             no separate constant-materializing MOV appears; the untied case
             needs only a MOVPRFX to move the input into z0 first.  */
      52  /*
      53  ** qsub_1_u32_tied1:
      54  **	uqsub	z0\.s, z0\.s, #1
      55  **	ret
      56  */
      57  TEST_UNIFORM_Z (qsub_1_u32_tied1, svuint32_t,
      58  		z0 = svqsub_n_u32 (z0, 1),
      59  		z0 = svqsub (z0, 1))
      60  
      61  /*
      62  ** qsub_1_u32_untied:
      63  **	movprfx	z0, z1
      64  **	uqsub	z0\.s, z0\.s, #1
      65  **	ret
      66  */
      67  TEST_UNIFORM_Z (qsub_1_u32_untied, svuint32_t,
      68  		z0 = svqsub_n_u32 (z1, 1),
      69  		z0 = svqsub (z1, 1))
      70  
      71  /*
      72  ** qsub_127_u32:
      73  **	uqsub	z0\.s, z0\.s, #127
      74  **	ret
      75  */
      76  TEST_UNIFORM_Z (qsub_127_u32, svuint32_t,
      77  		z0 = svqsub_n_u32 (z0, 127),
      78  		z0 = svqsub (z0, 127))
      79  
      80  /*
      81  ** qsub_128_u32:
      82  **	uqsub	z0\.s, z0\.s, #128
      83  **	ret
      84  */
      85  TEST_UNIFORM_Z (qsub_128_u32, svuint32_t,
      86  		z0 = svqsub_n_u32 (z0, 128),
      87  		z0 = svqsub (z0, 128))
      88  
      89  /*
      90  ** qsub_255_u32:
      91  **	uqsub	z0\.s, z0\.s, #255
      92  **	ret
      93  */
      94  TEST_UNIFORM_Z (qsub_255_u32, svuint32_t,
      95  		z0 = svqsub_n_u32 (z0, 255),
      96  		z0 = svqsub (z0, 255))
      97  
          /* Negative constants: these fall outside the immediate form used
             above, so the expected code materializes the constant in a
             vector register first (-1 via a byte-splat MOV, -127/-128 via
             an element-wise MOV) and then uses the register form of
             UQSUB.  */
      98  /*
      99  ** qsub_m1_u32:
     100  **	mov	(z[0-9]+)\.b, #-1
     101  **	uqsub	z0\.s, z0\.s, \1\.s
     102  **	ret
     103  */
     104  TEST_UNIFORM_Z (qsub_m1_u32, svuint32_t,
     105  		z0 = svqsub_n_u32 (z0, -1),
     106  		z0 = svqsub (z0, -1))
     107  
     108  /*
     109  ** qsub_m127_u32:
     110  **	mov	(z[0-9]+\.s), #-127
     111  **	uqsub	z0\.s, z0\.s, \1
     112  **	ret
     113  */
     114  TEST_UNIFORM_Z (qsub_m127_u32, svuint32_t,
     115  		z0 = svqsub_n_u32 (z0, -127),
     116  		z0 = svqsub (z0, -127))
     117  
     118  /*
     119  ** qsub_m128_u32:
     120  **	mov	(z[0-9]+\.s), #-128
     121  **	uqsub	z0\.s, z0\.s, \1
     122  **	ret
     123  */
     124  TEST_UNIFORM_Z (qsub_m128_u32, svuint32_t,
     125  		z0 = svqsub_n_u32 (z0, -128),
     126  		z0 = svqsub (z0, -128))