/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
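
/* ASRD (arithmetic shift right for divide) shifts each active element
   right by the immediate, rounding towards zero, so it implements signed
   division by a power of two.  The tests below check the expected code
   for the merging (_m), zeroing (_z) and "don't care" (_x) predication
   forms with tied and untied destination operands.  */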

/*
** asrd_1_s64_m_tied1:
**	asrd	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (asrd_1_s64_m_tied1, svint64_t,
		z0 = svasrd_n_s64_m (p0, z0, 1),
		z0 = svasrd_m (p0, z0, 1))

/*
** asrd_1_s64_m_untied:
**	movprfx	z0, z1
**	asrd	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (asrd_1_s64_m_untied, svint64_t,
		z0 = svasrd_n_s64_m (p0, z1, 1),
		z0 = svasrd_m (p0, z1, 1))

/*
** asrd_2_s64_m_tied1:
**	asrd	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (asrd_2_s64_m_tied1, svint64_t,
		z0 = svasrd_n_s64_m (p0, z0, 2),
		z0 = svasrd_m (p0, z0, 2))

/*
** asrd_2_s64_m_untied:
**	movprfx	z0, z1
**	asrd	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (asrd_2_s64_m_untied, svint64_t,
		z0 = svasrd_n_s64_m (p0, z1, 2),
		z0 = svasrd_m (p0, z1, 2))

/*
** asrd_64_s64_m_tied1:
**	asrd	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (asrd_64_s64_m_tied1, svint64_t,
		z0 = svasrd_n_s64_m (p0, z0, 64),
		z0 = svasrd_m (p0, z0, 64))

/*
** asrd_64_s64_m_untied:
**	movprfx	z0, z1
**	asrd	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (asrd_64_s64_m_untied, svint64_t,
		z0 = svasrd_n_s64_m (p0, z1, 64),
		z0 = svasrd_m (p0, z1, 64))

/*
** asrd_1_s64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	asrd	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (asrd_1_s64_z_tied1, svint64_t,
		z0 = svasrd_n_s64_z (p0, z0, 1),
		z0 = svasrd_z (p0, z0, 1))

/*
** asrd_1_s64_z_untied:
**	movprfx	z0\.d, p0/z, z1\.d
**	asrd	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (asrd_1_s64_z_untied, svint64_t,
		z0 = svasrd_n_s64_z (p0, z1, 1),
		z0 = svasrd_z (p0, z1, 1))

/*
** asrd_2_s64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	asrd	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (asrd_2_s64_z_tied1, svint64_t,
		z0 = svasrd_n_s64_z (p0, z0, 2),
		z0 = svasrd_z (p0, z0, 2))

/*
** asrd_2_s64_z_untied:
**	movprfx	z0\.d, p0/z, z1\.d
**	asrd	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (asrd_2_s64_z_untied, svint64_t,
		z0 = svasrd_n_s64_z (p0, z1, 2),
		z0 = svasrd_z (p0, z1, 2))

/*
** asrd_64_s64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	asrd	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (asrd_64_s64_z_tied1, svint64_t,
		z0 = svasrd_n_s64_z (p0, z0, 64),
		z0 = svasrd_z (p0, z0, 64))

/*
** asrd_64_s64_z_untied:
**	movprfx	z0\.d, p0/z, z1\.d
**	asrd	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (asrd_64_s64_z_untied, svint64_t,
		z0 = svasrd_n_s64_z (p0, z1, 64),
		z0 = svasrd_z (p0, z1, 64))

/*
** asrd_1_s64_x_tied1:
**	asrd	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (asrd_1_s64_x_tied1, svint64_t,
		z0 = svasrd_n_s64_x (p0, z0, 1),
		z0 = svasrd_x (p0, z0, 1))

/*
** asrd_1_s64_x_untied:
**	movprfx	z0, z1
**	asrd	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (asrd_1_s64_x_untied, svint64_t,
		z0 = svasrd_n_s64_x (p0, z1, 1),
		z0 = svasrd_x (p0, z1, 1))

/*
** asrd_2_s64_x_tied1:
**	asrd	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (asrd_2_s64_x_tied1, svint64_t,
		z0 = svasrd_n_s64_x (p0, z0, 2),
		z0 = svasrd_x (p0, z0, 2))

/*
** asrd_2_s64_x_untied:
**	movprfx	z0, z1
**	asrd	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (asrd_2_s64_x_untied, svint64_t,
		z0 = svasrd_n_s64_x (p0, z1, 2),
		z0 = svasrd_x (p0, z1, 2))

/*
** asrd_64_s64_x_tied1:
**	asrd	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (asrd_64_s64_x_tied1, svint64_t,
		z0 = svasrd_n_s64_x (p0, z0, 64),
		z0 = svasrd_x (p0, z0, 64))

/*
** asrd_64_s64_x_untied:
**	movprfx	z0, z1
**	asrd	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (asrd_64_s64_x_untied, svint64_t,
		z0 = svasrd_n_s64_x (p0, z1, 64),
		z0 = svasrd_x (p0, z1, 64))