gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/cmpge_wide_s8.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
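
/* This file exercises the wide (64-bit second operand) forms of svcmpge
   for signed 8-bit elements: each .b element of z0 is compared with the
   overlapping .d element of the second operand.  check-function-bodies
   matches each test function's assembly against the "**" template that
   precedes it.  */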

/*
** cmpge_wide_s8_tied:
**	cmpge	p0\.b, p0/z, z0\.b, z1\.d
**	ret
*/
TEST_COMPARE_DUAL_Z (cmpge_wide_s8_tied, svint8_t, svint64_t,
		     p0 = svcmpge_wide_s8 (p0, z0, z1),
		     p0 = svcmpge_wide (p0, z0, z1))

/*
** cmpge_wide_s8_untied:
**	cmpge	p0\.b, p1/z, z0\.b, z1\.d
**	ret
*/
TEST_COMPARE_DUAL_Z (cmpge_wide_s8_untied, svint8_t, svint64_t,
		     p0 = svcmpge_wide_s8 (p1, z0, z1),
		     p0 = svcmpge_wide (p1, z0, z1))
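
/* The tied form above reuses p0 as both governing predicate and result;
   the untied form governs with p1 and writes the result to p0.  */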

/*
** cmpge_wide_x0_s8:
**	mov	(z[0-9]+\.d), x0
**	cmpge	p0\.b, p1/z, z0\.b, \1
**	ret
*/
TEST_COMPARE_ZX (cmpge_wide_x0_s8, svint8_t, int64_t,
		 p0 = svcmpge_wide_n_s8 (p1, z0, x0),
		 p0 = svcmpge_wide (p1, z0, x0))
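
/* The 64-bit scalar operand above is first broadcast to a .d vector
   register, since the wide compare takes only vector or immediate
   second operands.  The tests below cover the immediate range: values
   in [-16, 15] fit CMPGE's signed immediate encoding and are used
   directly, while 16 and -17 must be materialized in a register.  */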

/*
** cmpge_wide_0_s8:
**	cmpge	p0\.b, p1/z, z0\.b, #0
**	ret
*/
TEST_COMPARE_Z (cmpge_wide_0_s8, svint8_t,
		p0 = svcmpge_wide_n_s8 (p1, z0, 0),
		p0 = svcmpge_wide (p1, z0, 0))

/*
** cmpge_wide_1_s8:
**	cmpge	p0\.b, p1/z, z0\.b, #1
**	ret
*/
TEST_COMPARE_Z (cmpge_wide_1_s8, svint8_t,
		p0 = svcmpge_wide_n_s8 (p1, z0, 1),
		p0 = svcmpge_wide (p1, z0, 1))

/*
** cmpge_wide_15_s8:
**	cmpge	p0\.b, p1/z, z0\.b, #15
**	ret
*/
TEST_COMPARE_Z (cmpge_wide_15_s8, svint8_t,
		p0 = svcmpge_wide_n_s8 (p1, z0, 15),
		p0 = svcmpge_wide (p1, z0, 15))

/*
** cmpge_wide_16_s8:
**	mov	(z[0-9]+\.d), #16
**	cmpge	p0\.b, p1/z, z0\.b, \1
**	ret
*/
TEST_COMPARE_Z (cmpge_wide_16_s8, svint8_t,
		p0 = svcmpge_wide_n_s8 (p1, z0, 16),
		p0 = svcmpge_wide (p1, z0, 16))

/*
** cmpge_wide_m1_s8:
**	cmpge	p0\.b, p1/z, z0\.b, #-1
**	ret
*/
TEST_COMPARE_Z (cmpge_wide_m1_s8, svint8_t,
		p0 = svcmpge_wide_n_s8 (p1, z0, -1),
		p0 = svcmpge_wide (p1, z0, -1))

/*
** cmpge_wide_m16_s8:
**	cmpge	p0\.b, p1/z, z0\.b, #-16
**	ret
*/
TEST_COMPARE_Z (cmpge_wide_m16_s8, svint8_t,
		p0 = svcmpge_wide_n_s8 (p1, z0, -16),
		p0 = svcmpge_wide (p1, z0, -16))

/*
** cmpge_wide_m17_s8:
**	mov	(z[0-9]+\.d), #-17
**	cmpge	p0\.b, p1/z, z0\.b, \1
**	ret
*/
TEST_COMPARE_Z (cmpge_wide_m17_s8, svint8_t,
		p0 = svcmpge_wide_n_s8 (p1, z0, -17),
		p0 = svcmpge_wide (p1, z0, -17))