/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

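/* Each block below pairs an expected-assembly pattern (checked by
   check-function-bodies) with a TEST_UNIFORM_Z/TEST_UNIFORM_ZX invocation
   from test_sve_acle.h, exercising svdup for svuint64_t with immediate and
   scalar operands in its unpredicated, _m, _z and _x forms.  */
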
/*
** dup_1_u64:
**	mov	z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_u64, svuint64_t,
		z0 = svdup_n_u64 (1),
		z0 = svdup_u64 (1))

/*
** dup_127_u64:
**	mov	z0\.d, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_u64, svuint64_t,
		z0 = svdup_n_u64 (127),
		z0 = svdup_u64 (127))

/*
** dup_128_u64:
**	mov	z0\.d, #128
**	ret
*/
TEST_UNIFORM_Z (dup_128_u64, svuint64_t,
		z0 = svdup_n_u64 (128),
		z0 = svdup_u64 (128))

/*
** dup_129_u64:
**	mov	(x[0-9]+), 129
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_129_u64, svuint64_t,
		z0 = svdup_n_u64 (129),
		z0 = svdup_u64 (129))

/*
** dup_253_u64:
**	mov	(x[0-9]+), 253
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_253_u64, svuint64_t,
		z0 = svdup_n_u64 (253),
		z0 = svdup_u64 (253))

/*
** dup_254_u64:
**	mov	z0\.d, #254
**	ret
*/
TEST_UNIFORM_Z (dup_254_u64, svuint64_t,
		z0 = svdup_n_u64 (254),
		z0 = svdup_u64 (254))

/*
** dup_255_u64:
**	mov	z0\.d, #255
**	ret
*/
TEST_UNIFORM_Z (dup_255_u64, svuint64_t,
		z0 = svdup_n_u64 (255),
		z0 = svdup_u64 (255))

/*
** dup_256_u64:
**	mov	z0\.d, #256
**	ret
*/
TEST_UNIFORM_Z (dup_256_u64, svuint64_t,
		z0 = svdup_n_u64 (256),
		z0 = svdup_u64 (256))

/*
** dup_257_u64:
**	mov	(x[0-9]+), 257
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_257_u64, svuint64_t,
		z0 = svdup_n_u64 (257),
		z0 = svdup_u64 (257))

/*
** dup_512_u64:
**	mov	z0\.d, #512
**	ret
*/
TEST_UNIFORM_Z (dup_512_u64, svuint64_t,
		z0 = svdup_n_u64 (512),
		z0 = svdup_u64 (512))

/*
** dup_7f00_u64:
**	mov	z0\.d, #32512
**	ret
*/
TEST_UNIFORM_Z (dup_7f00_u64, svuint64_t,
		z0 = svdup_n_u64 (0x7f00),
		z0 = svdup_u64 (0x7f00))

/*
** dup_7f01_u64:
**	mov	(x[0-9]+), 32513
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_7f01_u64, svuint64_t,
		z0 = svdup_n_u64 (0x7f01),
		z0 = svdup_u64 (0x7f01))

/*
** dup_7ffd_u64:
**	mov	(x[0-9]+), 32765
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_7ffd_u64, svuint64_t,
		z0 = svdup_n_u64 (0x7ffd),
		z0 = svdup_u64 (0x7ffd))

/*
** dup_7ffe_u64:
**	mov	z0\.d, #32766
**	ret
*/
TEST_UNIFORM_Z (dup_7ffe_u64, svuint64_t,
		z0 = svdup_n_u64 (0x7ffe),
		z0 = svdup_u64 (0x7ffe))

/*
** dup_7fff_u64:
**	mov	z0\.d, #32767
**	ret
*/
TEST_UNIFORM_Z (dup_7fff_u64, svuint64_t,
		z0 = svdup_n_u64 (0x7fff),
		z0 = svdup_u64 (0x7fff))

/*
** dup_m1_u64:
**	mov	z0\.b, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_u64, svuint64_t,
		z0 = svdup_n_u64 (-1),
		z0 = svdup_u64 (-1))

/*
** dup_m128_u64:
**	mov	z0\.d, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_u64, svuint64_t,
		z0 = svdup_n_u64 (-128),
		z0 = svdup_u64 (-128))

/*
** dup_m129_u64:
**	mov	z0\.d, #-129
**	ret
*/
TEST_UNIFORM_Z (dup_m129_u64, svuint64_t,
		z0 = svdup_n_u64 (-129),
		z0 = svdup_u64 (-129))

/*
** dup_m130_u64:
**	mov	(x[0-9]+), -130
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m130_u64, svuint64_t,
		z0 = svdup_n_u64 (-130),
		z0 = svdup_u64 (-130))

/*
** dup_m254_u64:
**	mov	(x[0-9]+), -254
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m254_u64, svuint64_t,
		z0 = svdup_n_u64 (-254),
		z0 = svdup_u64 (-254))

/*
** dup_m255_u64:
**	mov	z0\.d, #-255
**	ret
*/
TEST_UNIFORM_Z (dup_m255_u64, svuint64_t,
		z0 = svdup_n_u64 (-255),
		z0 = svdup_u64 (-255))

/*
** dup_m256_u64:
**	mov	z0\.d, #-256
**	ret
*/
TEST_UNIFORM_Z (dup_m256_u64, svuint64_t,
		z0 = svdup_n_u64 (-256),
		z0 = svdup_u64 (-256))

/*
** dup_m257_u64:
**	mov	z0\.d, #-257
**	ret
*/
TEST_UNIFORM_Z (dup_m257_u64, svuint64_t,
		z0 = svdup_n_u64 (-257),
		z0 = svdup_u64 (-257))

/*
** dup_m258_u64:
**	mov	(x[0-9]+), -258
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m258_u64, svuint64_t,
		z0 = svdup_n_u64 (-258),
		z0 = svdup_u64 (-258))

/*
** dup_m259_u64:
**	mov	(x[0-9]+), -259
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m259_u64, svuint64_t,
		z0 = svdup_n_u64 (-259),
		z0 = svdup_u64 (-259))

/*
** dup_m512_u64:
**	mov	z0\.d, #-512
**	ret
*/
TEST_UNIFORM_Z (dup_m512_u64, svuint64_t,
		z0 = svdup_n_u64 (-512),
		z0 = svdup_u64 (-512))

/*
** dup_m7f00_u64:
**	mov	z0\.d, #-32512
**	ret
*/
TEST_UNIFORM_Z (dup_m7f00_u64, svuint64_t,
		z0 = svdup_n_u64 (-0x7f00),
		z0 = svdup_u64 (-0x7f00))

/*
** dup_m7f01_u64:
**	mov	z0\.d, #-32513
**	ret
*/
TEST_UNIFORM_Z (dup_m7f01_u64, svuint64_t,
		z0 = svdup_n_u64 (-0x7f01),
		z0 = svdup_u64 (-0x7f01))

/*
** dup_m7f02_u64:
**	mov	(x[0-9]+), -32514
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m7f02_u64, svuint64_t,
		z0 = svdup_n_u64 (-0x7f02),
		z0 = svdup_u64 (-0x7f02))

/*
** dup_m7ffe_u64:
**	mov	(x[0-9]+), -32766
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m7ffe_u64, svuint64_t,
		z0 = svdup_n_u64 (-0x7ffe),
		z0 = svdup_u64 (-0x7ffe))

/*
** dup_m7fff_u64:
**	mov	z0\.d, #-32767
**	ret
*/
TEST_UNIFORM_Z (dup_m7fff_u64, svuint64_t,
		z0 = svdup_n_u64 (-0x7fff),
		z0 = svdup_u64 (-0x7fff))

/*
** dup_m8000_u64:
**	mov	z0\.d, #-32768
**	ret
*/
TEST_UNIFORM_Z (dup_m8000_u64, svuint64_t,
		z0 = svdup_n_u64 (-0x8000),
		z0 = svdup_u64 (-0x8000))

/*
** dup_x0_u64:
**	mov	z0\.d, x0
**	ret
*/
TEST_UNIFORM_ZX (dup_x0_u64, svuint64_t, uint64_t,
		 z0 = svdup_n_u64 (x0),
		 z0 = svdup_u64 (x0))

/*
** dup_1_u64_m:
**	mov	z0\.d, p0/m, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 1),
		z0 = svdup_u64_m (z0, p0, 1))

/*
** dup_127_u64_m:
**	mov	z0\.d, p0/m, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 127),
		z0 = svdup_u64_m (z0, p0, 127))

/*
** dup_128_u64_m:
**	mov	(z[0-9]+\.d), #128
**	sel	z0\.d, p0, \1, z0\.d
**	ret
*/
TEST_UNIFORM_Z (dup_128_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 128),
		z0 = svdup_u64_m (z0, p0, 128))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_129_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 129),
		z0 = svdup_u64_m (z0, p0, 129))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_253_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 253),
		z0 = svdup_u64_m (z0, p0, 253))

/*
** dup_254_u64_m:
**	mov	(z[0-9]+\.d), #254
**	sel	z0\.d, p0, \1, z0\.d
**	ret
*/
TEST_UNIFORM_Z (dup_254_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 254),
		z0 = svdup_u64_m (z0, p0, 254))

/*
** dup_255_u64_m:
**	mov	(z[0-9]+\.d), #255
**	sel	z0\.d, p0, \1, z0\.d
**	ret
*/
TEST_UNIFORM_Z (dup_255_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 255),
		z0 = svdup_u64_m (z0, p0, 255))

/*
** dup_256_u64_m:
**	mov	z0\.d, p0/m, #256
**	ret
*/
TEST_UNIFORM_Z (dup_256_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 256),
		z0 = svdup_u64_m (z0, p0, 256))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_257_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 257),
		z0 = svdup_u64_m (z0, p0, 257))

/*
** dup_512_u64_m:
**	mov	z0\.d, p0/m, #512
**	ret
*/
TEST_UNIFORM_Z (dup_512_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 512),
		z0 = svdup_u64_m (z0, p0, 512))

/*
** dup_7f00_u64_m:
**	mov	z0\.d, p0/m, #32512
**	ret
*/
TEST_UNIFORM_Z (dup_7f00_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 0x7f00),
		z0 = svdup_u64_m (z0, p0, 0x7f00))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_7f01_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 0x7f01),
		z0 = svdup_u64_m (z0, p0, 0x7f01))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_7ffd_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 0x7ffd),
		z0 = svdup_u64_m (z0, p0, 0x7ffd))

/*
** dup_7ffe_u64_m:
**	mov	(z[0-9]+\.d), #32766
**	sel	z0\.d, p0, \1, z0\.d
**	ret
*/
TEST_UNIFORM_Z (dup_7ffe_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 0x7ffe),
		z0 = svdup_u64_m (z0, p0, 0x7ffe))

/*
** dup_7fff_u64_m:
**	mov	(z[0-9]+\.d), #32767
**	sel	z0\.d, p0, \1, z0\.d
**	ret
*/
TEST_UNIFORM_Z (dup_7fff_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 0x7fff),
		z0 = svdup_u64_m (z0, p0, 0x7fff))

/*
** dup_m1_u64_m:
**	mov	z0\.d, p0/m, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -1),
		z0 = svdup_u64_m (z0, p0, -1))

/*
** dup_m128_u64_m:
**	mov	z0\.d, p0/m, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -128),
		z0 = svdup_u64_m (z0, p0, -128))

/*
** dup_m129_u64_m:
**	mov	(z[0-9]+\.d), #-129
**	sel	z0\.d, p0, \1, z0\.d
**	ret
*/
TEST_UNIFORM_Z (dup_m129_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -129),
		z0 = svdup_u64_m (z0, p0, -129))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m130_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -130),
		z0 = svdup_u64_m (z0, p0, -130))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m254_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -254),
		z0 = svdup_u64_m (z0, p0, -254))

/*
** dup_m255_u64_m:
**	mov	(z[0-9]+\.d), #-255
**	sel	z0\.d, p0, \1, z0\.d
**	ret
*/
TEST_UNIFORM_Z (dup_m255_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -255),
		z0 = svdup_u64_m (z0, p0, -255))

/*
** dup_m256_u64_m:
**	mov	z0\.d, p0/m, #-256
**	ret
*/
TEST_UNIFORM_Z (dup_m256_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -256),
		z0 = svdup_u64_m (z0, p0, -256))

/*
** dup_m257_u64_m:
**	mov	(z[0-9]+\.d), #-257
**	sel	z0\.d, p0, \1, z0\.d
**	ret
*/
TEST_UNIFORM_Z (dup_m257_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -257),
		z0 = svdup_u64_m (z0, p0, -257))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m258_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -258),
		z0 = svdup_u64_m (z0, p0, -258))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m259_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -259),
		z0 = svdup_u64_m (z0, p0, -259))

/*
** dup_m512_u64_m:
**	mov	z0\.d, p0/m, #-512
**	ret
*/
TEST_UNIFORM_Z (dup_m512_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -512),
		z0 = svdup_u64_m (z0, p0, -512))

/*
** dup_m7f00_u64_m:
**	mov	z0\.d, p0/m, #-32512
**	ret
*/
TEST_UNIFORM_Z (dup_m7f00_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -0x7f00),
		z0 = svdup_u64_m (z0, p0, -0x7f00))

/*
** dup_m7f01_u64_m:
**	mov	(z[0-9]+\.d), #-32513
**	sel	z0\.d, p0, \1, z0\.d
**	ret
*/
TEST_UNIFORM_Z (dup_m7f01_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -0x7f01),
		z0 = svdup_u64_m (z0, p0, -0x7f01))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m7f02_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -0x7f02),
		z0 = svdup_u64_m (z0, p0, -0x7f02))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m7ffe_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -0x7ffe),
		z0 = svdup_u64_m (z0, p0, -0x7ffe))

/*
** dup_m7fff_u64_m:
**	mov	(z[0-9]+\.d), #-32767
**	sel	z0\.d, p0, \1, z0\.d
**	ret
*/
TEST_UNIFORM_Z (dup_m7fff_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -0x7fff),
		z0 = svdup_u64_m (z0, p0, -0x7fff))

/*
** dup_m8000_u64_m:
**	mov	z0\.d, p0/m, #-32768
**	ret
*/
TEST_UNIFORM_Z (dup_m8000_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, -0x8000),
		z0 = svdup_u64_m (z0, p0, -0x8000))

/*
** dup_0_u64_m:
**	mov	z0\.d, p0/m, #0
**	ret
*/
TEST_UNIFORM_Z (dup_0_u64_m, svuint64_t,
		z0 = svdup_n_u64_m (z0, p0, 0),
		z0 = svdup_u64_m (z0, p0, 0))

/*
** dup_x0_u64_m:
**	movprfx	z0, z1
**	mov	z0\.d, p0/m, x0
**	ret
*/
TEST_UNIFORM_ZX (dup_x0_u64_m, svuint64_t, uint64_t,
		 z0 = svdup_n_u64_m (z1, p0, x0),
		 z0 = svdup_u64_m (z1, p0, x0))

/*
** dup_1_u64_z:
**	mov	z0\.d, p0/z, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 1),
		z0 = svdup_u64_z (p0, 1))

/*
** dup_127_u64_z:
**	mov	z0\.d, p0/z, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 127),
		z0 = svdup_u64_z (p0, 127))

/*
** dup_128_u64_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.d), #128
**	sel	z0\.d, p0, \2, \1\.d
**	ret
*/
TEST_UNIFORM_Z (dup_128_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 128),
		z0 = svdup_u64_z (p0, 128))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_129_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 129),
		z0 = svdup_u64_z (p0, 129))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_253_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 253),
		z0 = svdup_u64_z (p0, 253))

/*
** dup_254_u64_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.d), #254
**	sel	z0\.d, p0, \2, \1\.d
**	ret
*/
TEST_UNIFORM_Z (dup_254_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 254),
		z0 = svdup_u64_z (p0, 254))

/*
** dup_255_u64_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.d), #255
**	sel	z0\.d, p0, \2, \1\.d
**	ret
*/
TEST_UNIFORM_Z (dup_255_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 255),
		z0 = svdup_u64_z (p0, 255))

/*
** dup_256_u64_z:
**	mov	z0\.d, p0/z, #256
**	ret
*/
TEST_UNIFORM_Z (dup_256_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 256),
		z0 = svdup_u64_z (p0, 256))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_257_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 257),
		z0 = svdup_u64_z (p0, 257))

/*
** dup_512_u64_z:
**	mov	z0\.d, p0/z, #512
**	ret
*/
TEST_UNIFORM_Z (dup_512_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 512),
		z0 = svdup_u64_z (p0, 512))

/*
** dup_7f00_u64_z:
**	mov	z0\.d, p0/z, #32512
**	ret
*/
TEST_UNIFORM_Z (dup_7f00_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 0x7f00),
		z0 = svdup_u64_z (p0, 0x7f00))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_7f01_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 0x7f01),
		z0 = svdup_u64_z (p0, 0x7f01))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_7ffd_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 0x7ffd),
		z0 = svdup_u64_z (p0, 0x7ffd))

/*
** dup_7ffe_u64_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.d), #32766
**	sel	z0\.d, p0, \2, \1\.d
**	ret
*/
TEST_UNIFORM_Z (dup_7ffe_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 0x7ffe),
		z0 = svdup_u64_z (p0, 0x7ffe))

/*
** dup_7fff_u64_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.d), #32767
**	sel	z0\.d, p0, \2, \1\.d
**	ret
*/
TEST_UNIFORM_Z (dup_7fff_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 0x7fff),
		z0 = svdup_u64_z (p0, 0x7fff))

/*
** dup_m1_u64_z:
**	mov	z0\.d, p0/z, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -1),
		z0 = svdup_u64_z (p0, -1))

/*
** dup_m128_u64_z:
**	mov	z0\.d, p0/z, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -128),
		z0 = svdup_u64_z (p0, -128))

/*
** dup_m129_u64_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.d), #-129
**	sel	z0\.d, p0, \2, \1\.d
**	ret
*/
TEST_UNIFORM_Z (dup_m129_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -129),
		z0 = svdup_u64_z (p0, -129))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m130_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -130),
		z0 = svdup_u64_z (p0, -130))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m254_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -254),
		z0 = svdup_u64_z (p0, -254))

/*
** dup_m255_u64_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.d), #-255
**	sel	z0\.d, p0, \2, \1\.d
**	ret
*/
TEST_UNIFORM_Z (dup_m255_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -255),
		z0 = svdup_u64_z (p0, -255))

/*
** dup_m256_u64_z:
**	mov	z0\.d, p0/z, #-256
**	ret
*/
TEST_UNIFORM_Z (dup_m256_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -256),
		z0 = svdup_u64_z (p0, -256))

/*
** dup_m257_u64_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.d), #-257
**	sel	z0\.d, p0, \2, \1\.d
**	ret
*/
TEST_UNIFORM_Z (dup_m257_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -257),
		z0 = svdup_u64_z (p0, -257))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m258_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -258),
		z0 = svdup_u64_z (p0, -258))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m259_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -259),
		z0 = svdup_u64_z (p0, -259))

/*
** dup_m512_u64_z:
**	mov	z0\.d, p0/z, #-512
**	ret
*/
TEST_UNIFORM_Z (dup_m512_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -512),
		z0 = svdup_u64_z (p0, -512))

/*
** dup_m7f00_u64_z:
**	mov	z0\.d, p0/z, #-32512
**	ret
*/
TEST_UNIFORM_Z (dup_m7f00_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -0x7f00),
		z0 = svdup_u64_z (p0, -0x7f00))

/*
** dup_m7f01_u64_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.d), #-32513
**	sel	z0\.d, p0, \2, \1\.d
**	ret
*/
TEST_UNIFORM_Z (dup_m7f01_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -0x7f01),
		z0 = svdup_u64_z (p0, -0x7f01))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m7f02_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -0x7f02),
		z0 = svdup_u64_z (p0, -0x7f02))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m7ffe_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -0x7ffe),
		z0 = svdup_u64_z (p0, -0x7ffe))

/*
** dup_m7fff_u64_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.d), #-32767
**	sel	z0\.d, p0, \2, \1\.d
**	ret
*/
TEST_UNIFORM_Z (dup_m7fff_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -0x7fff),
		z0 = svdup_u64_z (p0, -0x7fff))

/*
** dup_m8000_u64_z:
**	mov	z0\.d, p0/z, #-32768
**	ret
*/
TEST_UNIFORM_Z (dup_m8000_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, -0x8000),
		z0 = svdup_u64_z (p0, -0x8000))

/*
** dup_0_u64_z:
**	mov	z0\.[bhsd], #0
**	ret
*/
TEST_UNIFORM_Z (dup_0_u64_z, svuint64_t,
		z0 = svdup_n_u64_z (p0, 0),
		z0 = svdup_u64_z (p0, 0))

/*
** dup_x0_u64_z:
**	movprfx	z0\.d, p0/z, z0\.d
**	mov	z0\.d, p0/m, x0
**	ret
*/
TEST_UNIFORM_ZX (dup_x0_u64_z, svuint64_t, uint64_t,
		 z0 = svdup_n_u64_z (p0, x0),
		 z0 = svdup_u64_z (p0, x0))

/*
** dup_1_u64_x:
**	mov	z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 1),
		z0 = svdup_u64_x (p0, 1))

/*
** dup_127_u64_x:
**	mov	z0\.d, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 127),
		z0 = svdup_u64_x (p0, 127))

/*
** dup_128_u64_x:
**	mov	z0\.d, #128
**	ret
*/
TEST_UNIFORM_Z (dup_128_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 128),
		z0 = svdup_u64_x (p0, 128))

/*
** dup_129_u64_x:
**	mov	(x[0-9]+), 129
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_129_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 129),
		z0 = svdup_u64_x (p0, 129))

/*
** dup_253_u64_x:
**	mov	(x[0-9]+), 253
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_253_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 253),
		z0 = svdup_u64_x (p0, 253))

/*
** dup_254_u64_x:
**	mov	z0\.d, #254
**	ret
*/
TEST_UNIFORM_Z (dup_254_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 254),
		z0 = svdup_u64_x (p0, 254))

/*
** dup_255_u64_x:
**	mov	z0\.d, #255
**	ret
*/
TEST_UNIFORM_Z (dup_255_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 255),
		z0 = svdup_u64_x (p0, 255))

/*
** dup_256_u64_x:
**	mov	z0\.d, #256
**	ret
*/
TEST_UNIFORM_Z (dup_256_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 256),
		z0 = svdup_u64_x (p0, 256))

/*
** dup_257_u64_x:
**	mov	(x[0-9]+), 257
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_257_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 257),
		z0 = svdup_u64_x (p0, 257))

/*
** dup_512_u64_x:
**	mov	z0\.d, #512
**	ret
*/
TEST_UNIFORM_Z (dup_512_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 512),
		z0 = svdup_u64_x (p0, 512))

/*
** dup_7f00_u64_x:
**	mov	z0\.d, #32512
**	ret
*/
TEST_UNIFORM_Z (dup_7f00_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 0x7f00),
		z0 = svdup_u64_x (p0, 0x7f00))

/*
** dup_7f01_u64_x:
**	mov	(x[0-9]+), 32513
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_7f01_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 0x7f01),
		z0 = svdup_u64_x (p0, 0x7f01))

/*
** dup_7ffd_u64_x:
**	mov	(x[0-9]+), 32765
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_7ffd_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 0x7ffd),
		z0 = svdup_u64_x (p0, 0x7ffd))

/*
** dup_7ffe_u64_x:
**	mov	z0\.d, #32766
**	ret
*/
TEST_UNIFORM_Z (dup_7ffe_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 0x7ffe),
		z0 = svdup_u64_x (p0, 0x7ffe))

/*
** dup_7fff_u64_x:
**	mov	z0\.d, #32767
**	ret
*/
TEST_UNIFORM_Z (dup_7fff_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, 0x7fff),
		z0 = svdup_u64_x (p0, 0x7fff))

/*
** dup_m1_u64_x:
**	mov	z0\.b, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -1),
		z0 = svdup_u64_x (p0, -1))

/*
** dup_m128_u64_x:
**	mov	z0\.d, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -128),
		z0 = svdup_u64_x (p0, -128))

/*
** dup_m129_u64_x:
**	mov	z0\.d, #-129
**	ret
*/
TEST_UNIFORM_Z (dup_m129_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -129),
		z0 = svdup_u64_x (p0, -129))

/*
** dup_m130_u64_x:
**	mov	(x[0-9]+), -130
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m130_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -130),
		z0 = svdup_u64_x (p0, -130))

/*
** dup_m254_u64_x:
**	mov	(x[0-9]+), -254
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m254_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -254),
		z0 = svdup_u64_x (p0, -254))

/*
** dup_m255_u64_x:
**	mov	z0\.d, #-255
**	ret
*/
TEST_UNIFORM_Z (dup_m255_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -255),
		z0 = svdup_u64_x (p0, -255))

/*
** dup_m256_u64_x:
**	mov	z0\.d, #-256
**	ret
*/
TEST_UNIFORM_Z (dup_m256_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -256),
		z0 = svdup_u64_x (p0, -256))

/*
** dup_m257_u64_x:
**	mov	z0\.d, #-257
**	ret
*/
TEST_UNIFORM_Z (dup_m257_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -257),
		z0 = svdup_u64_x (p0, -257))

/*
** dup_m258_u64_x:
**	mov	(x[0-9]+), -258
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m258_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -258),
		z0 = svdup_u64_x (p0, -258))

/*
** dup_m259_u64_x:
**	mov	(x[0-9]+), -259
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m259_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -259),
		z0 = svdup_u64_x (p0, -259))

/*
** dup_m512_u64_x:
**	mov	z0\.d, #-512
**	ret
*/
TEST_UNIFORM_Z (dup_m512_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -512),
		z0 = svdup_u64_x (p0, -512))

/*
** dup_m7f00_u64_x:
**	mov	z0\.d, #-32512
**	ret
*/
TEST_UNIFORM_Z (dup_m7f00_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -0x7f00),
		z0 = svdup_u64_x (p0, -0x7f00))

/*
** dup_m7f01_u64_x:
**	mov	z0\.d, #-32513
**	ret
*/
TEST_UNIFORM_Z (dup_m7f01_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -0x7f01),
		z0 = svdup_u64_x (p0, -0x7f01))

/*
** dup_m7f02_u64_x:
**	mov	(x[0-9]+), -32514
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m7f02_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -0x7f02),
		z0 = svdup_u64_x (p0, -0x7f02))

/*
** dup_m7ffe_u64_x:
**	mov	(x[0-9]+), -32766
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m7ffe_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -0x7ffe),
		z0 = svdup_u64_x (p0, -0x7ffe))

/*
** dup_m7fff_u64_x:
**	mov	z0\.d, #-32767
**	ret
*/
TEST_UNIFORM_Z (dup_m7fff_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -0x7fff),
		z0 = svdup_u64_x (p0, -0x7fff))

/*
** dup_m8000_u64_x:
**	mov	z0\.d, #-32768
**	ret
*/
TEST_UNIFORM_Z (dup_m8000_u64_x, svuint64_t,
		z0 = svdup_n_u64_x (p0, -0x8000),
		z0 = svdup_u64_x (p0, -0x8000))

/*
** dup_x0_u64_x:
**	mov	z0\.d, x0
**	ret
*/
TEST_UNIFORM_ZX (dup_x0_u64_x, svuint64_t, uint64_t,
		 z0 = svdup_n_u64_x (p0, x0),
		 z0 = svdup_u64_x (p0, x0))