/* gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_s8.c  */
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

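/* A note on the harness, for readers outside the GCC testsuite: the
   check-function-bodies directive above matches each function's generated
   assembly against the "**" comment that precedes it, treating each line
   as a regular expression.  TEST_UNIFORM_Z and TEST_UNIFORM_ZX come from
   test_sve_acle.h and, roughly, expand the two statements into test
   functions covering both the explicit _n_ spelling and the overloaded
   form of the intrinsic.  */
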
/*
** dup_1_s8:
**	mov	z0\.b, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_s8, svint8_t,
		z0 = svdup_n_s8 (1),
		z0 = svdup_s8 (1))

/*
** dup_127_s8:
**	mov	z0\.b, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_s8, svint8_t,
		z0 = svdup_n_s8 (127),
		z0 = svdup_s8 (127))

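/* Constants outside the range of int8_t wrap modulo 256 on conversion to
   the element type, so the values 128..255 below are expected to be
   materialised as their two's-complement equivalents -128..-1.  */
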
/*
** dup_128_s8:
**	mov	z0\.b, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_128_s8, svint8_t,
		z0 = svdup_n_s8 (128),
		z0 = svdup_s8 (128))

/*
** dup_129_s8:
**	mov	z0\.b, #-127
**	ret
*/
TEST_UNIFORM_Z (dup_129_s8, svint8_t,
		z0 = svdup_n_s8 (129),
		z0 = svdup_s8 (129))

/*
** dup_253_s8:
**	mov	z0\.b, #-3
**	ret
*/
TEST_UNIFORM_Z (dup_253_s8, svint8_t,
		z0 = svdup_n_s8 (253),
		z0 = svdup_s8 (253))

/*
** dup_254_s8:
**	mov	z0\.b, #-2
**	ret
*/
TEST_UNIFORM_Z (dup_254_s8, svint8_t,
		z0 = svdup_n_s8 (254),
		z0 = svdup_s8 (254))

/*
** dup_255_s8:
**	mov	z0\.b, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_255_s8, svint8_t,
		z0 = svdup_n_s8 (255),
		z0 = svdup_s8 (255))

/*
** dup_m1_s8:
**	mov	z0\.b, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_s8, svint8_t,
		z0 = svdup_n_s8 (-1),
		z0 = svdup_s8 (-1))

/*
** dup_m128_s8:
**	mov	z0\.b, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_s8, svint8_t,
		z0 = svdup_n_s8 (-128),
		z0 = svdup_s8 (-128))

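/* A run-time value cannot use the immediate form; it is broadcast from
   the low byte of a general-purpose register instead.  */
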
/*
** dup_w0_s8:
**	mov	z0\.b, w0
**	ret
*/
TEST_UNIFORM_ZX (dup_w0_s8, svint8_t, int8_t,
		 z0 = svdup_n_s8 (x0),
		 z0 = svdup_s8 (x0))

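/* _m (merging) predication: active lanes take the new value and inactive
   lanes keep the value of the first argument, hence the p0/m qualifier on
   the expected mov.  */
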
/*
** dup_1_s8_m:
**	mov	z0\.b, p0/m, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_s8_m, svint8_t,
		z0 = svdup_n_s8_m (z0, p0, 1),
		z0 = svdup_s8_m (z0, p0, 1))

/*
** dup_127_s8_m:
**	mov	z0\.b, p0/m, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_s8_m, svint8_t,
		z0 = svdup_n_s8_m (z0, p0, 127),
		z0 = svdup_s8_m (z0, p0, 127))

/*
** dup_128_s8_m:
**	mov	z0\.b, p0/m, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_128_s8_m, svint8_t,
		z0 = svdup_n_s8_m (z0, p0, 128),
		z0 = svdup_s8_m (z0, p0, 128))

/*
** dup_129_s8_m:
**	mov	z0\.b, p0/m, #-127
**	ret
*/
TEST_UNIFORM_Z (dup_129_s8_m, svint8_t,
		z0 = svdup_n_s8_m (z0, p0, 129),
		z0 = svdup_s8_m (z0, p0, 129))

/*
** dup_253_s8_m:
**	mov	z0\.b, p0/m, #-3
**	ret
*/
TEST_UNIFORM_Z (dup_253_s8_m, svint8_t,
		z0 = svdup_n_s8_m (z0, p0, 253),
		z0 = svdup_s8_m (z0, p0, 253))

/*
** dup_254_s8_m:
**	mov	z0\.b, p0/m, #-2
**	ret
*/
TEST_UNIFORM_Z (dup_254_s8_m, svint8_t,
		z0 = svdup_n_s8_m (z0, p0, 254),
		z0 = svdup_s8_m (z0, p0, 254))

/*
** dup_255_s8_m:
**	mov	z0\.b, p0/m, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_255_s8_m, svint8_t,
		z0 = svdup_n_s8_m (z0, p0, 255),
		z0 = svdup_s8_m (z0, p0, 255))

/*
** dup_m1_s8_m:
**	mov	z0\.b, p0/m, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_s8_m, svint8_t,
		z0 = svdup_n_s8_m (z0, p0, -1),
		z0 = svdup_s8_m (z0, p0, -1))

/*
** dup_m128_s8_m:
**	mov	z0\.b, p0/m, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_s8_m, svint8_t,
		z0 = svdup_n_s8_m (z0, p0, -128),
		z0 = svdup_s8_m (z0, p0, -128))

/*
** dup_0_s8_m:
**	mov	z0\.b, p0/m, #0
**	ret
*/
TEST_UNIFORM_Z (dup_0_s8_m, svint8_t,
		z0 = svdup_n_s8_m (z0, p0, 0),
		z0 = svdup_s8_m (z0, p0, 0))

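/* When the merge source (z1) differs from the destination (z0), the
   compiler is expected to copy it first with movprfx, a prefix instruction
   that can combine with the following predicated mov.  */
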
/*
** dup_w0_s8_m:
**	movprfx	z0, z1
**	mov	z0\.b, p0/m, w0
**	ret
*/
TEST_UNIFORM_ZX (dup_w0_s8_m, svint8_t, int8_t,
		z0 = svdup_n_s8_m (z1, p0, x0),
		z0 = svdup_s8_m (z1, p0, x0))

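/* _z (zeroing) predication: inactive lanes are set to zero, hence the
   p0/z qualifier on the expected mov.  */
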
/*
** dup_1_s8_z:
**	mov	z0\.b, p0/z, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_s8_z, svint8_t,
		z0 = svdup_n_s8_z (p0, 1),
		z0 = svdup_s8_z (p0, 1))

/*
** dup_127_s8_z:
**	mov	z0\.b, p0/z, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_s8_z, svint8_t,
		z0 = svdup_n_s8_z (p0, 127),
		z0 = svdup_s8_z (p0, 127))

/*
** dup_128_s8_z:
**	mov	z0\.b, p0/z, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_128_s8_z, svint8_t,
		z0 = svdup_n_s8_z (p0, 128),
		z0 = svdup_s8_z (p0, 128))

/*
** dup_129_s8_z:
**	mov	z0\.b, p0/z, #-127
**	ret
*/
TEST_UNIFORM_Z (dup_129_s8_z, svint8_t,
		z0 = svdup_n_s8_z (p0, 129),
		z0 = svdup_s8_z (p0, 129))

/*
** dup_253_s8_z:
**	mov	z0\.b, p0/z, #-3
**	ret
*/
TEST_UNIFORM_Z (dup_253_s8_z, svint8_t,
		z0 = svdup_n_s8_z (p0, 253),
		z0 = svdup_s8_z (p0, 253))

/*
** dup_254_s8_z:
**	mov	z0\.b, p0/z, #-2
**	ret
*/
TEST_UNIFORM_Z (dup_254_s8_z, svint8_t,
		z0 = svdup_n_s8_z (p0, 254),
		z0 = svdup_s8_z (p0, 254))

/*
** dup_255_s8_z:
**	mov	z0\.b, p0/z, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_255_s8_z, svint8_t,
		z0 = svdup_n_s8_z (p0, 255),
		z0 = svdup_s8_z (p0, 255))

/*
** dup_m1_s8_z:
**	mov	z0\.b, p0/z, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_s8_z, svint8_t,
		z0 = svdup_n_s8_z (p0, -1),
		z0 = svdup_s8_z (p0, -1))

/*
** dup_m128_s8_z:
**	mov	z0\.b, p0/z, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_s8_z, svint8_t,
		z0 = svdup_n_s8_z (p0, -128),
		z0 = svdup_s8_z (p0, -128))

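/* Writing zero clears every bit regardless of element size, so the pattern
   below accepts any of the .b/.h/.s/.d forms.  */
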
/*
** dup_0_s8_z:
**	mov	z0\.[bhsd], #0
**	ret
*/
TEST_UNIFORM_Z (dup_0_s8_z, svint8_t,
		z0 = svdup_n_s8_z (p0, 0),
		z0 = svdup_s8_z (p0, 0))

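/* mov from a general-purpose register has no zeroing form, so the expected
   sequence first zeroes the inactive lanes with a predicated movprfx and
   then does a merging mov.  */
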
/*
** dup_w0_s8_z:
**	movprfx	z0\.b, p0/z, z0\.b
**	mov	z0\.b, p0/m, w0
**	ret
*/
TEST_UNIFORM_ZX (dup_w0_s8_z, svint8_t, int8_t,
		z0 = svdup_n_s8_z (p0, x0),
		z0 = svdup_s8_z (p0, x0))

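/* _x predication leaves inactive lanes in an unspecified state, so the
   compiler is free to use the unpredicated forms; the expected code
   matches the unsuffixed tests above.  */
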
/*
** dup_1_s8_x:
**	mov	z0\.b, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_s8_x, svint8_t,
		z0 = svdup_n_s8_x (p0, 1),
		z0 = svdup_s8_x (p0, 1))

/*
** dup_127_s8_x:
**	mov	z0\.b, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_s8_x, svint8_t,
		z0 = svdup_n_s8_x (p0, 127),
		z0 = svdup_s8_x (p0, 127))

/*
** dup_128_s8_x:
**	mov	z0\.b, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_128_s8_x, svint8_t,
		z0 = svdup_n_s8_x (p0, 128),
		z0 = svdup_s8_x (p0, 128))

/*
** dup_129_s8_x:
**	mov	z0\.b, #-127
**	ret
*/
TEST_UNIFORM_Z (dup_129_s8_x, svint8_t,
		z0 = svdup_n_s8_x (p0, 129),
		z0 = svdup_s8_x (p0, 129))

/*
** dup_253_s8_x:
**	mov	z0\.b, #-3
**	ret
*/
TEST_UNIFORM_Z (dup_253_s8_x, svint8_t,
		z0 = svdup_n_s8_x (p0, 253),
		z0 = svdup_s8_x (p0, 253))

/*
** dup_254_s8_x:
**	mov	z0\.b, #-2
**	ret
*/
TEST_UNIFORM_Z (dup_254_s8_x, svint8_t,
		z0 = svdup_n_s8_x (p0, 254),
		z0 = svdup_s8_x (p0, 254))

/*
** dup_255_s8_x:
**	mov	z0\.b, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_255_s8_x, svint8_t,
		z0 = svdup_n_s8_x (p0, 255),
		z0 = svdup_s8_x (p0, 255))

/*
** dup_m1_s8_x:
**	mov	z0\.b, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_s8_x, svint8_t,
		z0 = svdup_n_s8_x (p0, -1),
		z0 = svdup_s8_x (p0, -1))

/*
** dup_m128_s8_x:
**	mov	z0\.b, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_s8_x, svint8_t,
		z0 = svdup_n_s8_x (p0, -128),
		z0 = svdup_s8_x (p0, -128))

/*
** dup_w0_s8_x:
**	mov	z0\.b, w0
**	ret
*/
TEST_UNIFORM_ZX (dup_w0_s8_x, svint8_t, int8_t,
		z0 = svdup_n_s8_x (p0, x0),
		z0 = svdup_s8_x (p0, x0))