(root)/
gcc-13.2.0/
gcc/
testsuite/
gcc.target/
aarch64/
sve/
acle/
asm/
cntp_b64.c
       1  /* { dg-additional-options "-msve-vector-bits=scalable" } */
       2  /* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
       3  
       4  #include "test_sve_acle.h"
       5  #include <stdbool.h>
       6  
          /* Code-generation tests for svcntp_b64 (count active .d predicate
             elements).  Each TEST_* macro (from test_sve_acle.h) builds a
             function whose assembly must match the "**" comment above it,
             as checked by check-function-bodies.  The tests verify that:
               - a plain count compiles to a single CNTP;
               - adding/subtracting the count folds to INCP/DECP when the
                 governing predicate is an all-true ptrue, but stays
                 CNTP + ADD/SUB for a general predicate.
             NOTE(review): the "**" comments are test assertions, not
             documentation — do not edit them.  */
       7  /*
       8  ** cnt_b64_32:
       9  **	cntp	x0, p0, p1\.d
      10  **	ret
      11  */
      12  TEST_PTEST (cnt_b64_32, uint32_t,
      13  	    x0 = svcntp_b64 (p0, p1));
      14  
      15  /*
      16  ** cnt_b64_64:
      17  **	cntp	x0, p0, p1\.d
      18  **	ret
      19  */
      20  TEST_PTEST (cnt_b64_64, uint64_t,
      21  	    x0 = svcntp_b64 (p0, p1));
      22  
          /* Scalar increment: with a general first predicate the count needs
             CNTP into a scratch register followed by ADD (either operand
             order, since ADD is commutative); with ptrue it folds to a
             single INCP on x0.  */
      23  /*
      24  ** inc_b64_32_general_x0:
      25  **	cntp	x([0-9]+), p0, p1\.d
      26  **	add	w0, (w0, w\1|w\1, w0)
      27  **	ret
      28  */
      29  TEST_PTEST (inc_b64_32_general_x0, uint32_t,
      30  	    x0 += svcntp_b64 (p0, p1));
      31  
      32  /*
      33  ** inc_b64_32_general_x1:
      34  **	cntp	x([0-9]+), p0, p1\.d
      35  **	add	w0, (w1, w\1|w\1, w1)
      36  **	ret
      37  */
      38  TEST_PTEST (inc_b64_32_general_x1, uint32_t,
      39  	    x0 = x1 + svcntp_b64 (p0, p1));
      40  
      41  /*
      42  ** inc_b64_32_ptrue_x0:
      43  **	incp	x0, p1\.d
      44  **	ret
      45  */
      46  TEST_PTEST (inc_b64_32_ptrue_x0, uint32_t,
      47  	    x0 += svcntp_b64 (svptrue_b64 (), p1));
      48  
      49  /*
      50  ** inc_b64_32_ptrue_x1:
      51  **	mov	w0, w1
      52  **	incp	x0, p1\.d
      53  **	ret
      54  */
      55  TEST_PTEST (inc_b64_32_ptrue_x1, uint32_t,
      56  	    x0 = x1 + svcntp_b64 (svptrue_b64 (), p1));
      57  
      58  /*
      59  ** inc_b64_64_general_x0:
      60  **	cntp	(x[0-9]+), p0, p1\.d
      61  **	add	x0, (x0, \1|\1, x0)
      62  **	ret
      63  */
      64  TEST_PTEST (inc_b64_64_general_x0, uint64_t,
      65  	    x0 += svcntp_b64 (p0, p1));
      66  
      67  /*
      68  ** inc_b64_64_general_x1:
      69  **	cntp	(x[0-9]+), p0, p1\.d
      70  **	add	x0, (x1, \1|\1, x1)
      71  **	ret
      72  */
      73  TEST_PTEST (inc_b64_64_general_x1, uint64_t,
      74  	    x0 = x1 + svcntp_b64 (p0, p1));
      75  
      76  /*
      77  ** inc_b64_64_ptrue_x0:
      78  **	incp	x0, p1\.d
      79  **	ret
      80  */
      81  TEST_PTEST (inc_b64_64_ptrue_x0, uint64_t,
      82  	    x0 += svcntp_b64 (svptrue_b64 (), p1));
      83  
      84  /*
      85  ** inc_b64_64_ptrue_x1:
      86  **	mov	x0, x1
      87  **	incp	x0, p1\.d
      88  **	ret
      89  */
      90  TEST_PTEST (inc_b64_64_ptrue_x1, uint64_t,
      91  	    x0 = x1 + svcntp_b64 (svptrue_b64 (), p1));
      92  
          /* Scalar decrement: same shape as the increment tests, but SUB is
             not commutative, so the patterns fix the operand order; with
             ptrue the operation folds to DECP.  */
      93  /*
      94  ** dec_b64_32_general_x0:
      95  **	cntp	x([0-9]+), p0, p1\.d
      96  **	sub	w0, w0, w\1
      97  **	ret
      98  */
      99  TEST_PTEST (dec_b64_32_general_x0, uint32_t,
     100  	    x0 -= svcntp_b64 (p0, p1));
     101  
     102  /*
     103  ** dec_b64_32_general_x1:
     104  **	cntp	x([0-9]+), p0, p1\.d
     105  **	sub	w0, w1, w\1
     106  **	ret
     107  */
     108  TEST_PTEST (dec_b64_32_general_x1, uint32_t,
     109  	    x0 = x1 - svcntp_b64 (p0, p1));
     110  
     111  /*
     112  ** dec_b64_32_ptrue_x0:
     113  **	decp	x0, p1\.d
     114  **	ret
     115  */
     116  TEST_PTEST (dec_b64_32_ptrue_x0, uint32_t,
     117  	    x0 -= svcntp_b64 (svptrue_b64 (), p1));
     118  
     119  /*
     120  ** dec_b64_32_ptrue_x1:
     121  **	mov	w0, w1
     122  **	decp	x0, p1\.d
     123  **	ret
     124  */
     125  TEST_PTEST (dec_b64_32_ptrue_x1, uint32_t,
     126  	    x0 = x1 - svcntp_b64 (svptrue_b64 (), p1));
     127  
     128  /*
     129  ** dec_b64_64_general_x0:
     130  **	cntp	(x[0-9]+), p0, p1\.d
     131  **	sub	x0, x0, \1
     132  **	ret
     133  */
     134  TEST_PTEST (dec_b64_64_general_x0, uint64_t,
     135  	    x0 -= svcntp_b64 (p0, p1));
     136  
     137  /*
     138  ** dec_b64_64_general_x1:
     139  **	cntp	(x[0-9]+), p0, p1\.d
     140  **	sub	x0, x1, \1
     141  **	ret
     142  */
     143  TEST_PTEST (dec_b64_64_general_x1, uint64_t,
     144  	    x0 = x1 - svcntp_b64 (p0, p1));
     145  
     146  /*
     147  ** dec_b64_64_ptrue_x0:
     148  **	decp	x0, p1\.d
     149  **	ret
     150  */
     151  TEST_PTEST (dec_b64_64_ptrue_x0, uint64_t,
     152  	    x0 -= svcntp_b64 (svptrue_b64 (), p1));
     153  
     154  /*
     155  ** dec_b64_64_ptrue_x1:
     156  **	mov	x0, x1
     157  **	decp	x0, p1\.d
     158  **	ret
     159  */
     160  TEST_PTEST (dec_b64_64_ptrue_x1, uint64_t,
     161  	    x0 = x1 - svcntp_b64 (svptrue_b64 (), p1));
     162  
          /* Vector forms: adding/subtracting the count to every .d lane of a
             vector.  With a general predicate this takes CNTP, a broadcast
             MOV into a vector register, then vector ADD/SUB; with ptrue it
             folds to INCP/DECP on z0, prefixed by MOVPRFX when the source
             operand is z1.  Each TEST_UNIFORM_Z checks both the _n_u64_x
             spelling and the overloaded svadd_x/svsub_x spelling.  */
     163  /*
     164  ** inc_b64_u64_general_z0:
     165  **	cntp	(x[0-9]+), p0, p1\.d
     166  **	mov	(z[0-9]+\.d), \1
     167  **	add	z0\.d, (z0\.d, \2|\2, z0\.d)
     168  **	ret
     169  */
     170  TEST_UNIFORM_Z (inc_b64_u64_general_z0, svuint64_t,
     171  		z0 = svadd_n_u64_x (svptrue_b64 (), z0, svcntp_b64 (p0, p1)),
     172  		z0 = svadd_x (svptrue_b64 (), z0, svcntp_b64 (p0, p1)));
     173  
     174  /*
     175  ** inc_b64_u64_general_z1:
     176  **	cntp	(x[0-9]+), p0, p1\.d
     177  **	mov	(z[0-9]+\.d), \1
     178  **	add	z0\.d, (z1\.d, \2|\2, z1\.d)
     179  **	ret
     180  */
     181  TEST_UNIFORM_Z (inc_b64_u64_general_z1, svuint64_t,
     182  		z0 = svadd_n_u64_x (svptrue_b64 (), z1, svcntp_b64 (p0, p1)),
     183  		z0 = svadd_x (svptrue_b64 (), z1, svcntp_b64 (p0, p1)));
     184  
     185  /*
     186  ** inc_b64_u64_ptrue_z0:
     187  **	incp	z0\.d, p0
     188  **	ret
     189  */
     190  TEST_UNIFORM_Z (inc_b64_u64_ptrue_z0, svuint64_t,
     191  		z0 = svadd_n_u64_x (svptrue_b64 (), z0, svcntp_b64 (svptrue_b64 (), p0)),
     192  		z0 = svadd_x (svptrue_b64 (), z0, svcntp_b64 (svptrue_b64 (), p0)));
     193  
     194  /*
     195  ** inc_b64_u64_ptrue_z1:
     196  **	movprfx	z0, z1
     197  **	incp	z0\.d, p0
     198  **	ret
     199  */
     200  TEST_UNIFORM_Z (inc_b64_u64_ptrue_z1, svuint64_t,
     201  		z0 = svadd_n_u64_x (svptrue_b64 (), z1, svcntp_b64 (svptrue_b64 (), p0)),
     202  		z0 = svadd_x (svptrue_b64 (), z1, svcntp_b64 (svptrue_b64 (), p0)));
     203  
     204  /*
     205  ** dec_b64_u64_general_z0:
     206  **	cntp	(x[0-9]+), p0, p1\.d
     207  **	mov	(z[0-9]+\.d), \1
     208  **	sub	z0\.d, z0\.d, \2
     209  **	ret
     210  */
     211  TEST_UNIFORM_Z (dec_b64_u64_general_z0, svuint64_t,
     212  		z0 = svsub_n_u64_x (svptrue_b64 (), z0, svcntp_b64 (p0, p1)),
     213  		z0 = svsub_x (svptrue_b64 (), z0, svcntp_b64 (p0, p1)));
     214  
     215  /*
     216  ** dec_b64_u64_general_z1:
     217  **	cntp	(x[0-9]+), p0, p1\.d
     218  **	mov	(z[0-9]+\.d), \1
     219  **	sub	z0\.d, z1\.d, \2
     220  **	ret
     221  */
     222  TEST_UNIFORM_Z (dec_b64_u64_general_z1, svuint64_t,
     223  		z0 = svsub_n_u64_x (svptrue_b64 (), z1, svcntp_b64 (p0, p1)),
     224  		z0 = svsub_x (svptrue_b64 (), z1, svcntp_b64 (p0, p1)));
     225  
     226  /*
     227  ** dec_b64_u64_ptrue_z0:
     228  **	decp	z0\.d, p0
     229  **	ret
     230  */
     231  TEST_UNIFORM_Z (dec_b64_u64_ptrue_z0, svuint64_t,
     232  		z0 = svsub_n_u64_x (svptrue_b64 (), z0, svcntp_b64 (svptrue_b64 (), p0)),
     233  		z0 = svsub_x (svptrue_b64 (), z0, svcntp_b64 (svptrue_b64 (), p0)));
     234  
     235  /*
     236  ** dec_b64_u64_ptrue_z1:
     237  **	movprfx	z0, z1
     238  **	decp	z0\.d, p0
     239  **	ret
     240  */
     241  TEST_UNIFORM_Z (dec_b64_u64_ptrue_z1, svuint64_t,
     242  		z0 = svsub_n_u64_x (svptrue_b64 (), z1, svcntp_b64 (svptrue_b64 (), p0)),
     243  		z0 = svsub_x (svptrue_b64 (), z1, svcntp_b64 (svptrue_b64 (), p0)));