gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/cmple_s64.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

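/* check-function-bodies compares each function's compiled assembly
   against the "**" comment that precedes it; lines grouped by ( | )
   are alternatives, since the compiler is free to commute a <= b into
   b >= a.  Each TEST_COMPARE_Z/TEST_COMPARE_ZX invocation (macros from
   test_sve_acle.h) exercises both the explicit-suffix intrinsic and
   its type-overloaded form.  */
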
/*
** cmple_s64_tied:
** (
**	cmpge	p0\.d, p0/z, z1\.d, z0\.d
** |
**	cmple	p0\.d, p0/z, z0\.d, z1\.d
** )
**	ret
*/
TEST_COMPARE_Z (cmple_s64_tied, svint64_t,
		p0 = svcmple_s64 (p0, z0, z1),
		p0 = svcmple (p0, z0, z1))

/*
** cmple_s64_untied:
** (
**	cmpge	p0\.d, p1/z, z1\.d, z0\.d
** |
**	cmple	p0\.d, p1/z, z0\.d, z1\.d
** )
**	ret
*/
TEST_COMPARE_Z (cmple_s64_untied, svint64_t,
		p0 = svcmple_s64 (p1, z0, z1),
		p0 = svcmple (p1, z0, z1))

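/* svcmple_n_s64 takes a scalar right-hand operand.  A runtime value in
   x0 cannot be encoded in the compare itself, so it is first broadcast
   to a vector register; the \1 back-reference below pins the compare
   to whichever z register the mov chose.  */
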
/*
** cmple_x0_s64:
**	mov	(z[0-9]+\.d), x0
** (
**	cmpge	p0\.d, p1/z, \1, z0\.d
** |
**	cmple	p0\.d, p1/z, z0\.d, \1
** )
**	ret
*/
TEST_COMPARE_ZX (cmple_x0_s64, svint64_t, int64_t,
		 p0 = svcmple_n_s64 (p1, z0, x0),
		 p0 = svcmple (p1, z0, x0))

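/* Signed compare-with-immediate accepts a 5-bit signed immediate
   (-16..15), so the constants in the next three tests encode directly
   in the cmple instruction.  */
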
/*
** cmple_0_s64:
**	cmple	p0\.d, p1/z, z0\.d, #0
**	ret
*/
TEST_COMPARE_Z (cmple_0_s64, svint64_t,
		p0 = svcmple_n_s64 (p1, z0, 0),
		p0 = svcmple (p1, z0, 0))

/*
** cmple_1_s64:
**	cmple	p0\.d, p1/z, z0\.d, #1
**	ret
*/
TEST_COMPARE_Z (cmple_1_s64, svint64_t,
		p0 = svcmple_n_s64 (p1, z0, 1),
		p0 = svcmple (p1, z0, 1))

/*
** cmple_15_s64:
**	cmple	p0\.d, p1/z, z0\.d, #15
**	ret
*/
TEST_COMPARE_Z (cmple_15_s64, svint64_t,
		p0 = svcmple_n_s64 (p1, z0, 15),
		p0 = svcmple (p1, z0, 15))

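/* 16 is just above the maximum immediate of 15, so it must be
   materialized in a vector register first.  */
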
/*
** cmple_16_s64:
**	mov	(z[0-9]+\.d), #16
** (
**	cmpge	p0\.d, p1/z, \1, z0\.d
** |
**	cmple	p0\.d, p1/z, z0\.d, \1
** )
**	ret
*/
TEST_COMPARE_Z (cmple_16_s64, svint64_t,
		p0 = svcmple_n_s64 (p1, z0, 16),
		p0 = svcmple (p1, z0, 16))

/*
** cmple_m1_s64:
**	cmple	p0\.d, p1/z, z0\.d, #-1
**	ret
*/
TEST_COMPARE_Z (cmple_m1_s64, svint64_t,
		p0 = svcmple_n_s64 (p1, z0, -1),
		p0 = svcmple (p1, z0, -1))

/*
** cmple_m16_s64:
**	cmple	p0\.d, p1/z, z0\.d, #-16
**	ret
*/
TEST_COMPARE_Z (cmple_m16_s64, svint64_t,
		p0 = svcmple_n_s64 (p1, z0, -16),
		p0 = svcmple (p1, z0, -16))

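/* Likewise, -17 is just below the minimum immediate of -16.  */
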
/*
** cmple_m17_s64:
**	mov	(z[0-9]+\.d), #-17
** (
**	cmpge	p0\.d, p1/z, \1, z0\.d
** |
**	cmple	p0\.d, p1/z, z0\.d, \1
** )
**	ret
*/
TEST_COMPARE_Z (cmple_m17_s64, svint64_t,
		p0 = svcmple_n_s64 (p1, z0, -17),
		p0 = svcmple (p1, z0, -17))