/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
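
/* svindex_s64 (base, step) returns a vector whose element i equals
   base + i * step.  INDEX encodes each immediate operand in a signed
   5-bit field, so values in [-16, 15] can be used directly, while
   anything outside that range must first be moved into a scalar
   register; the tests below probe both sides of that boundary
   (-17/-16 and 15/16).  */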

/*
** index_s64_x0_x1:
**	index	z0\.d, x0, x1
**	ret
*/
TEST_S (index_s64_x0_x1, svint64_t, int64_t,
	z0 = svindex_s64 (x0, x1))

/*
** index_s64_x0_2:
**	index	z0\.d, x0, #2
**	ret
*/
TEST_S (index_s64_x0_2, svint64_t, int64_t,
	z0 = svindex_s64 (x0, 2))

/*
** index_s64_50_2:
**	mov	(x[0-9]+), 50
**	index	z0\.d, \1, #2
**	ret
*/
TEST_S (index_s64_50_2, svint64_t, int64_t,
	z0 = svindex_s64 (50, 2))

/*
** index_s64_0_m17:
**	mov	(x[0-9]+), -17
**	index	z0\.d, #0, \1
**	ret
*/
TEST_S (index_s64_0_m17, svint64_t, int64_t,
	z0 = svindex_s64 (0, -17))

/*
** index_s64_0_m16:
**	index	z0\.d, #0, #-16
**	ret
*/
TEST_S (index_s64_0_m16, svint64_t, int64_t,
	z0 = svindex_s64 (0, -16))

/*
** index_s64_0_1:
**	index	z0\.d, #0, #1
**	ret
*/
TEST_S (index_s64_0_1, svint64_t, int64_t,
	z0 = svindex_s64 (0, 1))

/*
** index_s64_0_15:
**	index	z0\.d, #0, #15
**	ret
*/
TEST_S (index_s64_0_15, svint64_t, int64_t,
	z0 = svindex_s64 (0, 15))

/*
** index_s64_0_16:
**	mov	(x[0-9]+), 16
**	index	z0\.d, #0, \1
**	ret
*/
TEST_S (index_s64_0_16, svint64_t, int64_t,
	z0 = svindex_s64 (0, 16))

/*
** index_s64_m17_1:
**	mov	(x[0-9]+), -17
**	index	z0\.d, \1, #1
**	ret
*/
TEST_S (index_s64_m17_1, svint64_t, int64_t,
	z0 = svindex_s64 (-17, 1))

/*
** index_s64_m16_1:
**	index	z0\.d, #-16, #1
**	ret
*/
TEST_S (index_s64_m16_1, svint64_t, int64_t,
	z0 = svindex_s64 (-16, 1))

/*
** index_s64_m1_1:
**	index	z0\.d, #-1, #1
**	ret
*/
TEST_S (index_s64_m1_1, svint64_t, int64_t,
	z0 = svindex_s64 (-1, 1))

/*
** index_s64_1_1:
**	index	z0\.d, #1, #1
**	ret
*/
TEST_S (index_s64_1_1, svint64_t, int64_t,
	z0 = svindex_s64 (1, 1))

/*
** index_s64_15_1:
**	index	z0\.d, #15, #1
**	ret
*/
TEST_S (index_s64_15_1, svint64_t, int64_t,
	z0 = svindex_s64 (15, 1))

/*
** index_s64_16_1:
**	mov	(x[0-9]+), 16
**	index	z0\.d, \1, #1
**	ret
*/
TEST_S (index_s64_16_1, svint64_t, int64_t,
	z0 = svindex_s64 (16, 1))

/*
** index_s64_m17_x0:
**	mov	(x[0-9]+), -17
**	index	z0\.d, \1, x0
**	ret
*/
TEST_S (index_s64_m17_x0, svint64_t, int64_t,
	z0 = svindex_s64 (-17, x0))

/*
** index_s64_m16_x0:
**	index	z0\.d, #-16, x0
**	ret
*/
TEST_S (index_s64_m16_x0, svint64_t, int64_t,
	z0 = svindex_s64 (-16, x0))

/*
** index_s64_m1_x0:
**	index	z0\.d, #-1, x0
**	ret
*/
TEST_S (index_s64_m1_x0, svint64_t, int64_t,
	z0 = svindex_s64 (-1, x0))

/*
** index_s64_0_x0:
**	index	z0\.d, #0, x0
**	ret
*/
TEST_S (index_s64_0_x0, svint64_t, int64_t,
	z0 = svindex_s64 (0, x0))

/*
** index_s64_1_x0:
**	index	z0\.d, #1, x0
**	ret
*/
TEST_S (index_s64_1_x0, svint64_t, int64_t,
	z0 = svindex_s64 (1, x0))

/*
** index_s64_15_x0:
**	index	z0\.d, #15, x0
**	ret
*/
TEST_S (index_s64_15_x0, svint64_t, int64_t,
	z0 = svindex_s64 (15, x0))

/*
** index_s64_16_x0:
**	mov	(x[0-9]+), 16
**	index	z0\.d, \1, x0
**	ret
*/
TEST_S (index_s64_16_x0, svint64_t, int64_t,
	z0 = svindex_s64 (16, x0))

/*
** index_s64_x0_m17:
**	mov	(x[0-9]+), -17
**	index	z0\.d, x0, \1
**	ret
*/
TEST_S (index_s64_x0_m17, svint64_t, int64_t,
	z0 = svindex_s64 (x0, -17))

/*
** index_s64_x0_m16:
**	index	z0\.d, x0, #-16
**	ret
*/
TEST_S (index_s64_x0_m16, svint64_t, int64_t,
	z0 = svindex_s64 (x0, -16))

/*
** index_s64_x0_1:
**	index	z0\.d, x0, #1
**	ret
*/
TEST_S (index_s64_x0_1, svint64_t, int64_t,
	z0 = svindex_s64 (x0, 1))

/*
** index_s64_x0_15:
**	index	z0\.d, x0, #15
**	ret
*/
TEST_S (index_s64_x0_15, svint64_t, int64_t,
	z0 = svindex_s64 (x0, 15))

/*
** index_s64_x0_16:
**	mov	(x[0-9]+), 16
**	index	z0\.d, x0, \1
**	ret
*/
TEST_S (index_s64_x0_16, svint64_t, int64_t,
	z0 = svindex_s64 (x0, 16))