gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/ld1ub_s64.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" { target { ! ilp32 } } } } */

#include "test_sve_acle.h"
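
/* Each TEST_LOAD below expands, via the macros in test_sve_acle.h, into a
   function whose compiled body must match the "**" assembly template in the
   comment directly above it; check-function-bodies does the matching.  */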

/*
** ld1ub_s64_base:
**	ld1b	z0\.d, p0/z, \[x0\]
**	ret
*/
TEST_LOAD (ld1ub_s64_base, svint64_t, uint8_t,
	   z0 = svld1ub_s64 (p0, x0),
	   z0 = svld1ub_s64 (p0, x0))

/*
** ld1ub_s64_index:
**	ld1b	z0\.d, p0/z, \[x0, x1\]
**	ret
*/
TEST_LOAD (ld1ub_s64_index, svint64_t, uint8_t,
	   z0 = svld1ub_s64 (p0, x0 + x1),
	   z0 = svld1ub_s64 (p0, x0 + x1))

/*
** ld1ub_s64_1:
**	ld1b	z0\.d, p0/z, \[x0, #1, mul vl\]
**	ret
*/
TEST_LOAD (ld1ub_s64_1, svint64_t, uint8_t,
	   z0 = svld1ub_s64 (p0, x0 + svcntd ()),
	   z0 = svld1ub_s64 (p0, x0 + svcntd ()))

/*
** ld1ub_s64_7:
**	ld1b	z0\.d, p0/z, \[x0, #7, mul vl\]
**	ret
*/
TEST_LOAD (ld1ub_s64_7, svint64_t, uint8_t,
	   z0 = svld1ub_s64 (p0, x0 + svcntd () * 7),
	   z0 = svld1ub_s64 (p0, x0 + svcntd () * 7))

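/* An offset of svcntd () * 8 bytes is outside the -8..7 "mul vl" range that
   LD1B accepts as an immediate, and it equals a whole vector (CNTB bytes),
   so the expected code bumps the base with INCB instead.  */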
/* Moving the constant into a register would also be OK.  */
/*
** ld1ub_s64_8:
**	incb	x0
**	ld1b	z0\.d, p0/z, \[x0\]
**	ret
*/
TEST_LOAD (ld1ub_s64_8, svint64_t, uint8_t,
	   z0 = svld1ub_s64 (p0, x0 + svcntd () * 8),
	   z0 = svld1ub_s64 (p0, x0 + svcntd () * 8))

/*
** ld1ub_s64_m1:
**	ld1b	z0\.d, p0/z, \[x0, #-1, mul vl\]
**	ret
*/
TEST_LOAD (ld1ub_s64_m1, svint64_t, uint8_t,
	   z0 = svld1ub_s64 (p0, x0 - svcntd ()),
	   z0 = svld1ub_s64 (p0, x0 - svcntd ()))

/*
** ld1ub_s64_m8:
**	ld1b	z0\.d, p0/z, \[x0, #-8, mul vl\]
**	ret
*/
TEST_LOAD (ld1ub_s64_m8, svint64_t, uint8_t,
	   z0 = svld1ub_s64 (p0, x0 - svcntd () * 8),
	   z0 = svld1ub_s64 (p0, x0 - svcntd () * 8))

/* Moving the constant into a register would also be OK.  */
/*
** ld1ub_s64_m9:
**	decd	x0, all, mul #9
**	ld1b	z0\.d, p0/z, \[x0\]
**	ret
*/
TEST_LOAD (ld1ub_s64_m9, svint64_t, uint8_t,
	   z0 = svld1ub_s64 (p0, x0 - svcntd () * 9),
	   z0 = svld1ub_s64 (p0, x0 - svcntd () * 9))

/*
** ld1ub_vnum_s64_0:
**	ld1b	z0\.d, p0/z, \[x0\]
**	ret
*/
TEST_LOAD (ld1ub_vnum_s64_0, svint64_t, uint8_t,
	   z0 = svld1ub_vnum_s64 (p0, x0, 0),
	   z0 = svld1ub_vnum_s64 (p0, x0, 0))

/*
** ld1ub_vnum_s64_1:
**	ld1b	z0\.d, p0/z, \[x0, #1, mul vl\]
**	ret
*/
TEST_LOAD (ld1ub_vnum_s64_1, svint64_t, uint8_t,
	   z0 = svld1ub_vnum_s64 (p0, x0, 1),
	   z0 = svld1ub_vnum_s64 (p0, x0, 1))

/*
** ld1ub_vnum_s64_7:
**	ld1b	z0\.d, p0/z, \[x0, #7, mul vl\]
**	ret
*/
TEST_LOAD (ld1ub_vnum_s64_7, svint64_t, uint8_t,
	   z0 = svld1ub_vnum_s64 (p0, x0, 7),
	   z0 = svld1ub_vnum_s64 (p0, x0, 7))

/* Moving the constant into a register would also be OK.  */
/*
** ld1ub_vnum_s64_8:
**	incb	x0
**	ld1b	z0\.d, p0/z, \[x0\]
**	ret
*/
TEST_LOAD (ld1ub_vnum_s64_8, svint64_t, uint8_t,
	   z0 = svld1ub_vnum_s64 (p0, x0, 8),
	   z0 = svld1ub_vnum_s64 (p0, x0, 8))

/*
** ld1ub_vnum_s64_m1:
**	ld1b	z0\.d, p0/z, \[x0, #-1, mul vl\]
**	ret
*/
TEST_LOAD (ld1ub_vnum_s64_m1, svint64_t, uint8_t,
	   z0 = svld1ub_vnum_s64 (p0, x0, -1),
	   z0 = svld1ub_vnum_s64 (p0, x0, -1))

/*
** ld1ub_vnum_s64_m8:
**	ld1b	z0\.d, p0/z, \[x0, #-8, mul vl\]
**	ret
*/
TEST_LOAD (ld1ub_vnum_s64_m8, svint64_t, uint8_t,
	   z0 = svld1ub_vnum_s64 (p0, x0, -8),
	   z0 = svld1ub_vnum_s64 (p0, x0, -8))

/* Moving the constant into a register would also be OK.  */
/*
** ld1ub_vnum_s64_m9:
**	decd	x0, all, mul #9
**	ld1b	z0\.d, p0/z, \[x0\]
**	ret
*/
TEST_LOAD (ld1ub_vnum_s64_m9, svint64_t, uint8_t,
	   z0 = svld1ub_vnum_s64 (p0, x0, -9),
	   z0 = svld1ub_vnum_s64 (p0, x0, -9))

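/* For a run-time vnum the byte offset is x1 * svcntd (); the template below
   accepts either folding it into the base with MADD or computing it with MUL
   and using the register-offset form of LD1B.  */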
/*
** ld1ub_vnum_s64_x1:
**	cntd	(x[0-9]+)
** (
**	madd	(x[0-9]+), (?:x1, \1|\1, x1), x0
**	ld1b	z0\.d, p0/z, \[\2\]
** |
**	mul	(x[0-9]+), (?:x1, \1|\1, x1)
**	ld1b	z0\.d, p0/z, \[x0, \3\]
** )
**	ret
*/
TEST_LOAD (ld1ub_vnum_s64_x1, svint64_t, uint8_t,
	   z0 = svld1ub_vnum_s64 (p0, x0, x1),
	   z0 = svld1ub_vnum_s64 (p0, x0, x1))