(root)/
gcc-13.2.0/
gcc/
testsuite/
gcc.target/
aarch64/
sve2/
acle/
asm/
ldnt1ub_gather_u64.c
       1  /* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" { target { ! ilp32 } } } } */
       2  
       3  #include "test_sve_acle.h"
       4  
          /* Tied case: the result register is the same as the base-vector
             register (z0), so the expected code is a single LDNT1B gather
             from [z0.d] with no register move.  */
       5  /*
       6  ** ldnt1ub_gather_u64_tied1:
       7  **	ldnt1b	z0\.d, p0/z, \[z0\.d\]
       8  **	ret
       9  */
      10  TEST_LOAD_GATHER_ZS (ldnt1ub_gather_u64_tied1, svuint64_t, svuint64_t,
      11  		     z0_res = svldnt1ub_gather_u64base_u64 (p0, z0),
      12  		     z0_res = svldnt1ub_gather_u64 (p0, z0))
      13  
          /* Untied case: the base vector arrives in z1 while the result goes
             to z0; the gather can target z0 directly, so again no MOV is
             expected — only the LDNT1B from [z1.d].  */
      14  /*
      15  ** ldnt1ub_gather_u64_untied:
      16  **	ldnt1b	z0\.d, p0/z, \[z1\.d\]
      17  **	ret
      18  */
      19  TEST_LOAD_GATHER_ZS (ldnt1ub_gather_u64_untied, svuint64_t, svuint64_t,
      20  		     z0_res = svldnt1ub_gather_u64base_u64 (p0, z1),
      21  		     z0_res = svldnt1ub_gather_u64 (p0, z1))
      22  
          /* Runtime byte offset in a scalar register: the x0 argument should
             be used directly in the [z0.d, x0] addressing form.  */
      23  /*
      24  ** ldnt1ub_gather_x0_u64_offset:
      25  **	ldnt1b	z0\.d, p0/z, \[z0\.d, x0\]
      26  **	ret
      27  */
      28  TEST_LOAD_GATHER_ZS (ldnt1ub_gather_x0_u64_offset, svuint64_t, svuint64_t,
      29  		     z0_res = svldnt1ub_gather_u64base_offset_u64 (p0, z0, x0),
      30  		     z0_res = svldnt1ub_gather_offset_u64 (p0, z0, x0))
      31  
          /* Constant offset -1: the pattern expects the constant to be
             materialized into a scratch X register (captured as \1) and then
             used as the scalar offset — i.e. there is no immediate-offset
             form of this gather that could absorb it.  */
      32  /*
      33  ** ldnt1ub_gather_m1_u64_offset:
      34  **	mov	(x[0-9]+), #?-1
      35  **	ldnt1b	z0\.d, p0/z, \[z0\.d, \1\]
      36  **	ret
      37  */
      38  TEST_LOAD_GATHER_ZS (ldnt1ub_gather_m1_u64_offset, svuint64_t, svuint64_t,
      39  		     z0_res = svldnt1ub_gather_u64base_offset_u64 (p0, z0, -1),
      40  		     z0_res = svldnt1ub_gather_offset_u64 (p0, z0, -1))
      41  
          /* Offset 0 is the one constant that needs no scratch register: it
             should fold away, leaving the plain base-only [z0.d] form.  */
      42  /*
      43  ** ldnt1ub_gather_0_u64_offset:
      44  **	ldnt1b	z0\.d, p0/z, \[z0\.d\]
      45  **	ret
      46  */
      47  TEST_LOAD_GATHER_ZS (ldnt1ub_gather_0_u64_offset, svuint64_t, svuint64_t,
      48  		     z0_res = svldnt1ub_gather_u64base_offset_u64 (p0, z0, 0),
      49  		     z0_res = svldnt1ub_gather_offset_u64 (p0, z0, 0))
      50  
          /* Small positive constant offset (5): like -1, it must be loaded
             into a scratch X register before the gather.  */
      51  /*
      52  ** ldnt1ub_gather_5_u64_offset:
      53  **	mov	(x[0-9]+), #?5
      54  **	ldnt1b	z0\.d, p0/z, \[z0\.d, \1\]
      55  **	ret
      56  */
      57  TEST_LOAD_GATHER_ZS (ldnt1ub_gather_5_u64_offset, svuint64_t, svuint64_t,
      58  		     z0_res = svldnt1ub_gather_u64base_offset_u64 (p0, z0, 5),
      59  		     z0_res = svldnt1ub_gather_offset_u64 (p0, z0, 5))
      60  
          /* Offset 31: boundary value for the 5-bit [0,31] immediate range
             that other (temporal) byte gathers support; for this non-temporal
             form the test still expects a MOV into a scratch register.  */
      61  /*
      62  ** ldnt1ub_gather_31_u64_offset:
      63  **	mov	(x[0-9]+), #?31
      64  **	ldnt1b	z0\.d, p0/z, \[z0\.d, \1\]
      65  **	ret
      66  */
      67  TEST_LOAD_GATHER_ZS (ldnt1ub_gather_31_u64_offset, svuint64_t, svuint64_t,
      68  		     z0_res = svldnt1ub_gather_u64base_offset_u64 (p0, z0, 31),
      69  		     z0_res = svldnt1ub_gather_offset_u64 (p0, z0, 31))
      70  
          /* Offset 32: first value past the [0,31] range tested above;
             expected code is the same MOV-then-gather sequence.  */
      71  /*
      72  ** ldnt1ub_gather_32_u64_offset:
      73  **	mov	(x[0-9]+), #?32
      74  **	ldnt1b	z0\.d, p0/z, \[z0\.d, \1\]
      75  **	ret
      76  */
      77  TEST_LOAD_GATHER_ZS (ldnt1ub_gather_32_u64_offset, svuint64_t, svuint64_t,
      78  		     z0_res = svldnt1ub_gather_u64base_offset_u64 (p0, z0, 32),
      79  		     z0_res = svldnt1ub_gather_offset_u64 (p0, z0, 32))
      80  
          /* Scalar base + signed 64-bit vector of offsets (SZ variant):
             base pointer in x0, offsets in z0; expect a single LDNT1B using
             the [z0.d, x0] form.  */
      81  /*
      82  ** ldnt1ub_gather_x0_u64_s64offset:
      83  **	ldnt1b	z0\.d, p0/z, \[z0\.d, x0\]
      84  **	ret
      85  */
      86  TEST_LOAD_GATHER_SZ (ldnt1ub_gather_x0_u64_s64offset, svuint64_t, uint8_t, svint64_t,
      87  		     z0_res = svldnt1ub_gather_s64offset_u64 (p0, x0, z0),
      88  		     z0_res = svldnt1ub_gather_offset_u64 (p0, x0, z0))
      89  
          /* Tied case for the signed vector-offset form: the offset vector
             and the result share z0.  (The invocation is textually the same
             as the x0 test above — both deliberately use z0 — only the
             function name differs.)  */
      90  /*
      91  ** ldnt1ub_gather_tied1_u64_s64offset:
      92  **	ldnt1b	z0\.d, p0/z, \[z0\.d, x0\]
      93  **	ret
      94  */
      95  TEST_LOAD_GATHER_SZ (ldnt1ub_gather_tied1_u64_s64offset, svuint64_t, uint8_t, svint64_t,
      96  		     z0_res = svldnt1ub_gather_s64offset_u64 (p0, x0, z0),
      97  		     z0_res = svldnt1ub_gather_offset_u64 (p0, x0, z0))
      98  
          /* Untied case for the signed vector-offset form: offsets arrive in
             z1, result in z0; no move expected, just LDNT1B from [z1.d, x0].  */
      99  /*
     100  ** ldnt1ub_gather_untied_u64_s64offset:
     101  **	ldnt1b	z0\.d, p0/z, \[z1\.d, x0\]
     102  **	ret
     103  */
     104  TEST_LOAD_GATHER_SZ (ldnt1ub_gather_untied_u64_s64offset, svuint64_t, uint8_t, svint64_t,
     105  		     z0_res = svldnt1ub_gather_s64offset_u64 (p0, x0, z1),
     106  		     z0_res = svldnt1ub_gather_offset_u64 (p0, x0, z1))
     107  
          /* Unsigned 64-bit vector-offset form: same expected code as the
             s64offset variant — the signedness only affects overload
             selection, not the instruction emitted.  */
     108  /*
     109  ** ldnt1ub_gather_x0_u64_u64offset:
     110  **	ldnt1b	z0\.d, p0/z, \[z0\.d, x0\]
     111  **	ret
     112  */
     113  TEST_LOAD_GATHER_SZ (ldnt1ub_gather_x0_u64_u64offset, svuint64_t, uint8_t, svuint64_t,
     114  		     z0_res = svldnt1ub_gather_u64offset_u64 (p0, x0, z0),
     115  		     z0_res = svldnt1ub_gather_offset_u64 (p0, x0, z0))
     116  
          /* Tied case, unsigned vector offsets: offset vector and result both
             in z0, matching the tied1 s64offset test above.  */
     117  /*
     118  ** ldnt1ub_gather_tied1_u64_u64offset:
     119  **	ldnt1b	z0\.d, p0/z, \[z0\.d, x0\]
     120  **	ret
     121  */
     122  TEST_LOAD_GATHER_SZ (ldnt1ub_gather_tied1_u64_u64offset, svuint64_t, uint8_t, svuint64_t,
     123  		     z0_res = svldnt1ub_gather_u64offset_u64 (p0, x0, z0),
     124  		     z0_res = svldnt1ub_gather_offset_u64 (p0, x0, z0))
     125  
          /* Untied case, unsigned vector offsets: offsets in z1, result in
             z0; expect only the LDNT1B from [z1.d, x0].  */
     126  /*
     127  ** ldnt1ub_gather_untied_u64_u64offset:
     128  **	ldnt1b	z0\.d, p0/z, \[z1\.d, x0\]
     129  **	ret
     130  */
     131  TEST_LOAD_GATHER_SZ (ldnt1ub_gather_untied_u64_u64offset, svuint64_t, uint8_t, svuint64_t,
     132  		     z0_res = svldnt1ub_gather_u64offset_u64 (p0, x0, z1),
     133  		     z0_res = svldnt1ub_gather_offset_u64 (p0, x0, z1))