/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

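/* Each "**" comment below is a check-function-bodies template: the
   lines after the function name are regexes that must match the
   function's assembly line by line.  TEST_UNIFORM_Z and TEST_UNIFORM_ZX
   (from test_sve_acle.h) expand to a function named by their first
   argument; of the two statements, one exercises the _n intrinsic and
   the other the overloaded form, so both spellings are covered.  */
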
/*
** dup_1_u16:
**	mov	z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_u16, svuint16_t,
		z0 = svdup_n_u16 (1),
		z0 = svdup_u16 (1))

/*
** dup_127_u16:
**	mov	z0\.h, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_u16, svuint16_t,
		z0 = svdup_n_u16 (127),
		z0 = svdup_u16 (127))

/*
** dup_128_u16:
**	mov	z0\.h, #128
**	ret
*/
TEST_UNIFORM_Z (dup_128_u16, svuint16_t,
		z0 = svdup_n_u16 (128),
		z0 = svdup_u16 (128))

/*
** dup_129_u16:
**	movi	v([0-9]+)\.8h, 0x81
**	dup	z0\.q, z\1\.q\[0\]
**	ret
*/
TEST_UNIFORM_Z (dup_129_u16, svuint16_t,
		z0 = svdup_n_u16 (129),
		z0 = svdup_u16 (129))

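/* SVE's DUP (immediate) takes a signed 8-bit value, optionally shifted
   left by 8, and the "mov" alias also covers DUPM bit patterns (which
   is how values such as #128 and #254 are encoded).  129 and 253 fit
   neither, so the expected code splats them with Advanced SIMD MOVI and
   broadcasts the 128-bit result with DUP (indexed); the negative
   counterparts -130 and -254 use MVNI the same way.  */
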
/*
** dup_253_u16:
**	movi	v([0-9]+)\.8h, 0xfd
**	dup	z0\.q, z\1\.q\[0\]
**	ret
*/
TEST_UNIFORM_Z (dup_253_u16, svuint16_t,
		z0 = svdup_n_u16 (253),
		z0 = svdup_u16 (253))

/*
** dup_254_u16:
**	mov	z0\.h, #254
**	ret
*/
TEST_UNIFORM_Z (dup_254_u16, svuint16_t,
		z0 = svdup_n_u16 (254),
		z0 = svdup_u16 (254))

/*
** dup_255_u16:
**	mov	z0\.h, #255
**	ret
*/
TEST_UNIFORM_Z (dup_255_u16, svuint16_t,
		z0 = svdup_n_u16 (255),
		z0 = svdup_u16 (255))

/*
** dup_256_u16:
**	mov	z0\.h, #256
**	ret
*/
TEST_UNIFORM_Z (dup_256_u16, svuint16_t,
		z0 = svdup_n_u16 (256),
		z0 = svdup_u16 (256))

/*
** dup_257_u16:
**	mov	z0\.b, #1
**	ret
*/
TEST_UNIFORM_Z (dup_257_u16, svuint16_t,
		z0 = svdup_n_u16 (257),
		z0 = svdup_u16 (257))

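/* 257 is 0x0101, the same byte repeated, so it can be encoded as a byte
   splat even though it is out of range as a halfword immediate.  */
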
/*
** dup_512_u16:
**	mov	z0\.h, #512
**	ret
*/
TEST_UNIFORM_Z (dup_512_u16, svuint16_t,
		z0 = svdup_n_u16 (512),
		z0 = svdup_u16 (512))

/*
** dup_7f00_u16:
**	mov	z0\.h, #32512
**	ret
*/
TEST_UNIFORM_Z (dup_7f00_u16, svuint16_t,
		z0 = svdup_n_u16 (0x7f00),
		z0 = svdup_u16 (0x7f00))

/*
** dup_7f01_u16:
**	mov	(w[0-9]+), 32513
**	mov	z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (dup_7f01_u16, svuint16_t,
		z0 = svdup_n_u16 (0x7f01),
		z0 = svdup_u16 (0x7f01))

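/* 0x7f01 fits none of the vector immediate encodings (both bytes are
   nonzero and differ, and it is not a valid bitmask pattern), so the
   constant is built in a general register and broadcast with DUP
   (scalar).  */
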
/*
** dup_7ffd_u16:
**	mov	(w[0-9]+), 32765
**	mov	z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (dup_7ffd_u16, svuint16_t,
		z0 = svdup_n_u16 (0x7ffd),
		z0 = svdup_u16 (0x7ffd))

/*
** dup_7ffe_u16:
**	mov	z0\.h, #32766
**	ret
*/
TEST_UNIFORM_Z (dup_7ffe_u16, svuint16_t,
		z0 = svdup_n_u16 (0x7ffe),
		z0 = svdup_u16 (0x7ffe))

/*
** dup_7fff_u16:
**	mov	z0\.h, #32767
**	ret
*/
TEST_UNIFORM_Z (dup_7fff_u16, svuint16_t,
		z0 = svdup_n_u16 (0x7fff),
		z0 = svdup_u16 (0x7fff))

/*
** dup_m1_u16:
**	mov	z0\.b, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_u16, svuint16_t,
		z0 = svdup_n_u16 (-1),
		z0 = svdup_u16 (-1))

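/* -1 is all-ones at every element size; the byte form is the canonical
   spelling.  */
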
/*
** dup_m128_u16:
**	mov	z0\.h, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_u16, svuint16_t,
		z0 = svdup_n_u16 (-128),
		z0 = svdup_u16 (-128))

/*
** dup_m129_u16:
**	mov	z0\.h, #-129
**	ret
*/
TEST_UNIFORM_Z (dup_m129_u16, svuint16_t,
		z0 = svdup_n_u16 (-129),
		z0 = svdup_u16 (-129))

/*
** dup_m130_u16:
**	mvni	v([0-9]+)\.8h, 0x81
**	dup	z0\.q, z\1\.q\[0\]
**	ret
*/
TEST_UNIFORM_Z (dup_m130_u16, svuint16_t,
		z0 = svdup_n_u16 (-130),
		z0 = svdup_u16 (-130))

/*
** dup_m254_u16:
**	mvni	v([0-9]+)\.8h, 0xfd
**	dup	z0\.q, z\1\.q\[0\]
**	ret
*/
TEST_UNIFORM_Z (dup_m254_u16, svuint16_t,
		z0 = svdup_n_u16 (-254),
		z0 = svdup_u16 (-254))

/*
** dup_m255_u16:
**	mov	z0\.h, #-255
**	ret
*/
TEST_UNIFORM_Z (dup_m255_u16, svuint16_t,
		z0 = svdup_n_u16 (-255),
		z0 = svdup_u16 (-255))

/*
** dup_m256_u16:
**	mov	z0\.h, #-256
**	ret
*/
TEST_UNIFORM_Z (dup_m256_u16, svuint16_t,
		z0 = svdup_n_u16 (-256),
		z0 = svdup_u16 (-256))

/*
** dup_m257_u16:
**	mov	z0\.h, #-257
**	ret
*/
TEST_UNIFORM_Z (dup_m257_u16, svuint16_t,
		z0 = svdup_n_u16 (-257),
		z0 = svdup_u16 (-257))

/*
** dup_m258_u16:
**	mov	z0\.b, #-2
**	ret
*/
TEST_UNIFORM_Z (dup_m258_u16, svuint16_t,
		z0 = svdup_n_u16 (-258),
		z0 = svdup_u16 (-258))

/*
** dup_m259_u16:
**	mov	(w[0-9]+), -259
**	mov	z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m259_u16, svuint16_t,
		z0 = svdup_n_u16 (-259),
		z0 = svdup_u16 (-259))

/*
** dup_m512_u16:
**	mov	z0\.h, #-512
**	ret
*/
TEST_UNIFORM_Z (dup_m512_u16, svuint16_t,
		z0 = svdup_n_u16 (-512),
		z0 = svdup_u16 (-512))

/*
** dup_m7f00_u16:
**	mov	z0\.h, #-32512
**	ret
*/
TEST_UNIFORM_Z (dup_m7f00_u16, svuint16_t,
		z0 = svdup_n_u16 (-0x7f00),
		z0 = svdup_u16 (-0x7f00))

/*
** dup_m7f01_u16:
**	mov	z0\.h, #-32513
**	ret
*/
TEST_UNIFORM_Z (dup_m7f01_u16, svuint16_t,
		z0 = svdup_n_u16 (-0x7f01),
		z0 = svdup_u16 (-0x7f01))

/*
** dup_m7f02_u16:
**	mov	(w[0-9]+), -32514
**	mov	z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m7f02_u16, svuint16_t,
		z0 = svdup_n_u16 (-0x7f02),
		z0 = svdup_u16 (-0x7f02))

/*
** dup_m7ffe_u16:
**	mov	(w[0-9]+), -32766
**	mov	z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m7ffe_u16, svuint16_t,
		z0 = svdup_n_u16 (-0x7ffe),
		z0 = svdup_u16 (-0x7ffe))

/*
** dup_m7fff_u16:
**	mov	z0\.h, #-32767
**	ret
*/
TEST_UNIFORM_Z (dup_m7fff_u16, svuint16_t,
		z0 = svdup_n_u16 (-0x7fff),
		z0 = svdup_u16 (-0x7fff))

/*
** dup_m8000_u16:
**	mov	z0\.h, #-32768
**	ret
*/
TEST_UNIFORM_Z (dup_m8000_u16, svuint16_t,
		z0 = svdup_n_u16 (-0x8000),
		z0 = svdup_u16 (-0x8000))

/*
** dup_w0_u16:
**	mov	z0\.h, w0
**	ret
*/
TEST_UNIFORM_ZX (dup_w0_u16, svuint16_t, uint16_t,
		 z0 = svdup_n_u16 (x0),
		 z0 = svdup_u16 (x0))

/*
** dup_1_u16_m:
**	mov	z0\.h, p0/m, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 1),
		z0 = svdup_u16_m (z0, p0, 1))

/*
** dup_127_u16_m:
**	mov	z0\.h, p0/m, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 127),
		z0 = svdup_u16_m (z0, p0, 127))

/*
** dup_128_u16_m:
**	mov	(z[0-9]+\.h), #128
**	sel	z0\.h, p0, \1, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_128_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 128),
		z0 = svdup_u16_m (z0, p0, 128))

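/* CPY (immediate, merging) has the same signed-8-bit, optionally
   shifted, range as DUP (immediate).  DUPM-only constants such as #128
   above cannot be merged in one instruction, so the expected code
   builds them unpredicated and selects with SEL.  The TODO entries mark
   constants for which the current output is known to be poor, so no
   body is matched.  */
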
/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_129_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 129),
		z0 = svdup_u16_m (z0, p0, 129))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_253_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 253),
		z0 = svdup_u16_m (z0, p0, 253))

/*
** dup_254_u16_m:
**	mov	(z[0-9]+\.h), #254
**	sel	z0\.h, p0, \1, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_254_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 254),
		z0 = svdup_u16_m (z0, p0, 254))

/*
** dup_255_u16_m:
**	mov	(z[0-9]+\.h), #255
**	sel	z0\.h, p0, \1, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_255_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 255),
		z0 = svdup_u16_m (z0, p0, 255))

/*
** dup_256_u16_m:
**	mov	z0\.h, p0/m, #256
**	ret
*/
TEST_UNIFORM_Z (dup_256_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 256),
		z0 = svdup_u16_m (z0, p0, 256))

/*
** dup_257_u16_m:
**	mov	(z[0-9]+)\.b, #1
**	sel	z0\.h, p0, \1\.h, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_257_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 257),
		z0 = svdup_u16_m (z0, p0, 257))

/*
** dup_512_u16_m:
**	mov	z0\.h, p0/m, #512
**	ret
*/
TEST_UNIFORM_Z (dup_512_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 512),
		z0 = svdup_u16_m (z0, p0, 512))

/*
** dup_7f00_u16_m:
**	mov	z0\.h, p0/m, #32512
**	ret
*/
TEST_UNIFORM_Z (dup_7f00_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 0x7f00),
		z0 = svdup_u16_m (z0, p0, 0x7f00))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_7f01_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 0x7f01),
		z0 = svdup_u16_m (z0, p0, 0x7f01))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_7ffd_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 0x7ffd),
		z0 = svdup_u16_m (z0, p0, 0x7ffd))

/*
** dup_7ffe_u16_m:
**	mov	(z[0-9]+\.h), #32766
**	sel	z0\.h, p0, \1, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_7ffe_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 0x7ffe),
		z0 = svdup_u16_m (z0, p0, 0x7ffe))

/*
** dup_7fff_u16_m:
**	mov	(z[0-9]+\.h), #32767
**	sel	z0\.h, p0, \1, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_7fff_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 0x7fff),
		z0 = svdup_u16_m (z0, p0, 0x7fff))

/*
** dup_m1_u16_m:
**	mov	z0\.h, p0/m, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -1),
		z0 = svdup_u16_m (z0, p0, -1))

/*
** dup_m128_u16_m:
**	mov	z0\.h, p0/m, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -128),
		z0 = svdup_u16_m (z0, p0, -128))

/*
** dup_m129_u16_m:
**	mov	(z[0-9]+\.h), #-129
**	sel	z0\.h, p0, \1, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m129_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -129),
		z0 = svdup_u16_m (z0, p0, -129))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m130_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -130),
		z0 = svdup_u16_m (z0, p0, -130))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m254_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -254),
		z0 = svdup_u16_m (z0, p0, -254))

/*
** dup_m255_u16_m:
**	mov	(z[0-9]+\.h), #-255
**	sel	z0\.h, p0, \1, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m255_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -255),
		z0 = svdup_u16_m (z0, p0, -255))

/*
** dup_m256_u16_m:
**	mov	z0\.h, p0/m, #-256
**	ret
*/
TEST_UNIFORM_Z (dup_m256_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -256),
		z0 = svdup_u16_m (z0, p0, -256))

/*
** dup_m257_u16_m:
**	mov	(z[0-9]+\.h), #-257
**	sel	z0\.h, p0, \1, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m257_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -257),
		z0 = svdup_u16_m (z0, p0, -257))

/*
** dup_m258_u16_m:
**	mov	(z[0-9]+)\.b, #-2
**	sel	z0\.h, p0, \1\.h, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m258_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -258),
		z0 = svdup_u16_m (z0, p0, -258))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m259_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -259),
		z0 = svdup_u16_m (z0, p0, -259))

/*
** dup_m512_u16_m:
**	mov	z0\.h, p0/m, #-512
**	ret
*/
TEST_UNIFORM_Z (dup_m512_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -512),
		z0 = svdup_u16_m (z0, p0, -512))

/*
** dup_m7f00_u16_m:
**	mov	z0\.h, p0/m, #-32512
**	ret
*/
TEST_UNIFORM_Z (dup_m7f00_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -0x7f00),
		z0 = svdup_u16_m (z0, p0, -0x7f00))

/*
** dup_m7f01_u16_m:
**	mov	(z[0-9]+\.h), #-32513
**	sel	z0\.h, p0, \1, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m7f01_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -0x7f01),
		z0 = svdup_u16_m (z0, p0, -0x7f01))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m7f02_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -0x7f02),
		z0 = svdup_u16_m (z0, p0, -0x7f02))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m7ffe_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -0x7ffe),
		z0 = svdup_u16_m (z0, p0, -0x7ffe))

/*
** dup_m7fff_u16_m:
**	mov	(z[0-9]+\.h), #-32767
**	sel	z0\.h, p0, \1, z0\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m7fff_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -0x7fff),
		z0 = svdup_u16_m (z0, p0, -0x7fff))

/*
** dup_m8000_u16_m:
**	mov	z0\.h, p0/m, #-32768
**	ret
*/
TEST_UNIFORM_Z (dup_m8000_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, -0x8000),
		z0 = svdup_u16_m (z0, p0, -0x8000))

/*
** dup_0_u16_m:
**	mov	z0\.h, p0/m, #0
**	ret
*/
TEST_UNIFORM_Z (dup_0_u16_m, svuint16_t,
		z0 = svdup_n_u16_m (z0, p0, 0),
		z0 = svdup_u16_m (z0, p0, 0))

/*
** dup_w0_u16_m:
**	movprfx	z0, z1
**	mov	z0\.h, p0/m, w0
**	ret
*/
TEST_UNIFORM_ZX (dup_w0_u16_m, svuint16_t, uint16_t,
		 z0 = svdup_n_u16_m (z1, p0, x0),
		 z0 = svdup_u16_m (z1, p0, x0))

/*
** dup_1_u16_z:
**	mov	z0\.h, p0/z, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 1),
		z0 = svdup_u16_z (p0, 1))

/*
** dup_127_u16_z:
**	mov	z0\.h, p0/z, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 127),
		z0 = svdup_u16_z (p0, 127))

/*
** dup_128_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.h), #128
**	sel	z0\.h, p0, \2, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_128_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 128),
		z0 = svdup_u16_z (p0, 128))

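/* For the zeroing forms, out-of-range constants additionally need an
   explicit zero vector for the inactive lanes before the SEL.  */
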
/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_129_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 129),
		z0 = svdup_u16_z (p0, 129))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_253_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 253),
		z0 = svdup_u16_z (p0, 253))

/*
** dup_254_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.h), #254
**	sel	z0\.h, p0, \2, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_254_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 254),
		z0 = svdup_u16_z (p0, 254))

/*
** dup_255_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.h), #255
**	sel	z0\.h, p0, \2, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_255_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 255),
		z0 = svdup_u16_z (p0, 255))

/*
** dup_256_u16_z:
**	mov	z0\.h, p0/z, #256
**	ret
*/
TEST_UNIFORM_Z (dup_256_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 256),
		z0 = svdup_u16_z (p0, 256))

/*
** dup_257_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+)\.b, #1
**	sel	z0\.h, p0, \2\.h, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_257_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 257),
		z0 = svdup_u16_z (p0, 257))

/*
** dup_512_u16_z:
**	mov	z0\.h, p0/z, #512
**	ret
*/
TEST_UNIFORM_Z (dup_512_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 512),
		z0 = svdup_u16_z (p0, 512))

/*
** dup_7f00_u16_z:
**	mov	z0\.h, p0/z, #32512
**	ret
*/
TEST_UNIFORM_Z (dup_7f00_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 0x7f00),
		z0 = svdup_u16_z (p0, 0x7f00))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_7f01_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 0x7f01),
		z0 = svdup_u16_z (p0, 0x7f01))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_7ffd_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 0x7ffd),
		z0 = svdup_u16_z (p0, 0x7ffd))

/*
** dup_7ffe_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.h), #32766
**	sel	z0\.h, p0, \2, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_7ffe_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 0x7ffe),
		z0 = svdup_u16_z (p0, 0x7ffe))

/*
** dup_7fff_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.h), #32767
**	sel	z0\.h, p0, \2, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_7fff_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 0x7fff),
		z0 = svdup_u16_z (p0, 0x7fff))

/*
** dup_m1_u16_z:
**	mov	z0\.h, p0/z, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -1),
		z0 = svdup_u16_z (p0, -1))

/*
** dup_m128_u16_z:
**	mov	z0\.h, p0/z, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -128),
		z0 = svdup_u16_z (p0, -128))

/*
** dup_m129_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.h), #-129
**	sel	z0\.h, p0, \2, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m129_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -129),
		z0 = svdup_u16_z (p0, -129))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m130_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -130),
		z0 = svdup_u16_z (p0, -130))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m254_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -254),
		z0 = svdup_u16_z (p0, -254))

/*
** dup_m255_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.h), #-255
**	sel	z0\.h, p0, \2, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m255_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -255),
		z0 = svdup_u16_z (p0, -255))

/*
** dup_m256_u16_z:
**	mov	z0\.h, p0/z, #-256
**	ret
*/
TEST_UNIFORM_Z (dup_m256_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -256),
		z0 = svdup_u16_z (p0, -256))

/*
** dup_m257_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.h), #-257
**	sel	z0\.h, p0, \2, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m257_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -257),
		z0 = svdup_u16_z (p0, -257))

/*
** dup_m258_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+)\.b, #-2
**	sel	z0\.h, p0, \2\.h, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m258_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -258),
		z0 = svdup_u16_z (p0, -258))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m259_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -259),
		z0 = svdup_u16_z (p0, -259))

/*
** dup_m512_u16_z:
**	mov	z0\.h, p0/z, #-512
**	ret
*/
TEST_UNIFORM_Z (dup_m512_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -512),
		z0 = svdup_u16_z (p0, -512))

/*
** dup_m7f00_u16_z:
**	mov	z0\.h, p0/z, #-32512
**	ret
*/
TEST_UNIFORM_Z (dup_m7f00_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -0x7f00),
		z0 = svdup_u16_z (p0, -0x7f00))

/*
** dup_m7f01_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.h), #-32513
**	sel	z0\.h, p0, \2, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m7f01_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -0x7f01),
		z0 = svdup_u16_z (p0, -0x7f01))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m7f02_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -0x7f02),
		z0 = svdup_u16_z (p0, -0x7f02))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_m7ffe_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -0x7ffe),
		z0 = svdup_u16_z (p0, -0x7ffe))

/*
** dup_m7fff_u16_z:
**	mov	(z[0-9]+)\.b, #0
**	mov	(z[0-9]+\.h), #-32767
**	sel	z0\.h, p0, \2, \1\.h
**	ret
*/
TEST_UNIFORM_Z (dup_m7fff_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -0x7fff),
		z0 = svdup_u16_z (p0, -0x7fff))

/*
** dup_m8000_u16_z:
**	mov	z0\.h, p0/z, #-32768
**	ret
*/
TEST_UNIFORM_Z (dup_m8000_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, -0x8000),
		z0 = svdup_u16_z (p0, -0x8000))

/*
** dup_0_u16_z:
**	mov	z0\.[bhsd], #0
**	ret
*/
TEST_UNIFORM_Z (dup_0_u16_z, svuint16_t,
		z0 = svdup_n_u16_z (p0, 0),
		z0 = svdup_u16_z (p0, 0))

/*
** dup_w0_u16_z:
**	movprfx	z0\.h, p0/z, z0\.h
**	mov	z0\.h, p0/m, w0
**	ret
*/
TEST_UNIFORM_ZX (dup_w0_u16_z, svuint16_t, uint16_t,
		 z0 = svdup_n_u16_z (p0, x0),
		 z0 = svdup_u16_z (p0, x0))

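/* For a variable, the zeroing MOVPRFX clears the inactive lanes and the
   predicated mov then copies w0 into the active ones.  */

/* The _x forms leave inactive lanes undefined, so the constant cases
   below are expected to compile to the same code as the unpredicated
   forms at the top of the file.  */
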
/*
** dup_1_u16_x:
**	mov	z0\.h, #1
**	ret
*/
TEST_UNIFORM_Z (dup_1_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 1),
		z0 = svdup_u16_x (p0, 1))

/*
** dup_127_u16_x:
**	mov	z0\.h, #127
**	ret
*/
TEST_UNIFORM_Z (dup_127_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 127),
		z0 = svdup_u16_x (p0, 127))

/*
** dup_128_u16_x:
**	mov	z0\.h, #128
**	ret
*/
TEST_UNIFORM_Z (dup_128_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 128),
		z0 = svdup_u16_x (p0, 128))

/*
** dup_129_u16_x:
**	movi	v([0-9]+)\.8h, 0x81
**	dup	z0\.q, z\1\.q\[0\]
**	ret
*/
TEST_UNIFORM_Z (dup_129_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 129),
		z0 = svdup_u16_x (p0, 129))

/*
** dup_253_u16_x:
**	movi	v([0-9]+)\.8h, 0xfd
**	dup	z0\.q, z\1\.q\[0\]
**	ret
*/
TEST_UNIFORM_Z (dup_253_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 253),
		z0 = svdup_u16_x (p0, 253))

/*
** dup_254_u16_x:
**	mov	z0\.h, #254
**	ret
*/
TEST_UNIFORM_Z (dup_254_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 254),
		z0 = svdup_u16_x (p0, 254))

/*
** dup_255_u16_x:
**	mov	z0\.h, #255
**	ret
*/
TEST_UNIFORM_Z (dup_255_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 255),
		z0 = svdup_u16_x (p0, 255))

/*
** dup_256_u16_x:
**	mov	z0\.h, #256
**	ret
*/
TEST_UNIFORM_Z (dup_256_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 256),
		z0 = svdup_u16_x (p0, 256))

/*
** dup_257_u16_x:
**	mov	z0\.b, #1
**	ret
*/
TEST_UNIFORM_Z (dup_257_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 257),
		z0 = svdup_u16_x (p0, 257))

/*
** dup_512_u16_x:
**	mov	z0\.h, #512
**	ret
*/
TEST_UNIFORM_Z (dup_512_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 512),
		z0 = svdup_u16_x (p0, 512))

/*
** dup_7f00_u16_x:
**	mov	z0\.h, #32512
**	ret
*/
TEST_UNIFORM_Z (dup_7f00_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 0x7f00),
		z0 = svdup_u16_x (p0, 0x7f00))

/*
** dup_7f01_u16_x:
**	mov	(w[0-9]+), 32513
**	mov	z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (dup_7f01_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 0x7f01),
		z0 = svdup_u16_x (p0, 0x7f01))

/*
** dup_7ffd_u16_x:
**	mov	(w[0-9]+), 32765
**	mov	z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (dup_7ffd_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 0x7ffd),
		z0 = svdup_u16_x (p0, 0x7ffd))

/*
** dup_7ffe_u16_x:
**	mov	z0\.h, #32766
**	ret
*/
TEST_UNIFORM_Z (dup_7ffe_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 0x7ffe),
		z0 = svdup_u16_x (p0, 0x7ffe))

/*
** dup_7fff_u16_x:
**	mov	z0\.h, #32767
**	ret
*/
TEST_UNIFORM_Z (dup_7fff_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, 0x7fff),
		z0 = svdup_u16_x (p0, 0x7fff))

/*
** dup_m1_u16_x:
**	mov	z0\.b, #-1
**	ret
*/
TEST_UNIFORM_Z (dup_m1_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -1),
		z0 = svdup_u16_x (p0, -1))

/*
** dup_m128_u16_x:
**	mov	z0\.h, #-128
**	ret
*/
TEST_UNIFORM_Z (dup_m128_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -128),
		z0 = svdup_u16_x (p0, -128))

/*
** dup_m129_u16_x:
**	mov	z0\.h, #-129
**	ret
*/
TEST_UNIFORM_Z (dup_m129_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -129),
		z0 = svdup_u16_x (p0, -129))

/*
** dup_m130_u16_x:
**	mvni	v([0-9]+)\.8h, 0x81
**	dup	z0\.q, z\1\.q\[0\]
**	ret
*/
TEST_UNIFORM_Z (dup_m130_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -130),
		z0 = svdup_u16_x (p0, -130))

/*
** dup_m254_u16_x:
**	mvni	v([0-9]+)\.8h, 0xfd
**	dup	z0\.q, z\1\.q\[0\]
**	ret
*/
TEST_UNIFORM_Z (dup_m254_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -254),
		z0 = svdup_u16_x (p0, -254))

/*
** dup_m255_u16_x:
**	mov	z0\.h, #-255
**	ret
*/
TEST_UNIFORM_Z (dup_m255_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -255),
		z0 = svdup_u16_x (p0, -255))

/*
** dup_m256_u16_x:
**	mov	z0\.h, #-256
**	ret
*/
TEST_UNIFORM_Z (dup_m256_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -256),
		z0 = svdup_u16_x (p0, -256))

/*
** dup_m257_u16_x:
**	mov	z0\.h, #-257
**	ret
*/
TEST_UNIFORM_Z (dup_m257_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -257),
		z0 = svdup_u16_x (p0, -257))

/*
** dup_m258_u16_x:
**	mov	z0\.b, #-2
**	ret
*/
TEST_UNIFORM_Z (dup_m258_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -258),
		z0 = svdup_u16_x (p0, -258))

/*
** dup_m259_u16_x:
**	mov	(w[0-9]+), -259
**	mov	z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m259_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -259),
		z0 = svdup_u16_x (p0, -259))

/*
** dup_m512_u16_x:
**	mov	z0\.h, #-512
**	ret
*/
TEST_UNIFORM_Z (dup_m512_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -512),
		z0 = svdup_u16_x (p0, -512))

/*
** dup_m7f00_u16_x:
**	mov	z0\.h, #-32512
**	ret
*/
TEST_UNIFORM_Z (dup_m7f00_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -0x7f00),
		z0 = svdup_u16_x (p0, -0x7f00))

/*
** dup_m7f01_u16_x:
**	mov	z0\.h, #-32513
**	ret
*/
TEST_UNIFORM_Z (dup_m7f01_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -0x7f01),
		z0 = svdup_u16_x (p0, -0x7f01))

/*
** dup_m7f02_u16_x:
**	mov	(w[0-9]+), -32514
**	mov	z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m7f02_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -0x7f02),
		z0 = svdup_u16_x (p0, -0x7f02))

/*
** dup_m7ffe_u16_x:
**	mov	(w[0-9]+), -32766
**	mov	z0\.h, \1
**	ret
*/
TEST_UNIFORM_Z (dup_m7ffe_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -0x7ffe),
		z0 = svdup_u16_x (p0, -0x7ffe))

/*
** dup_m7fff_u16_x:
**	mov	z0\.h, #-32767
**	ret
*/
TEST_UNIFORM_Z (dup_m7fff_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -0x7fff),
		z0 = svdup_u16_x (p0, -0x7fff))

/*
** dup_m8000_u16_x:
**	mov	z0\.h, #-32768
**	ret
*/
TEST_UNIFORM_Z (dup_m8000_u16_x, svuint16_t,
		z0 = svdup_n_u16_x (p0, -0x8000),
		z0 = svdup_u16_x (p0, -0x8000))

/*
** dup_w0_u16_x:
**	mov	z0\.h, w0
**	ret
*/
TEST_UNIFORM_ZX (dup_w0_u16_x, svuint16_t, uint16_t,
		 z0 = svdup_n_u16_x (p0, x0),
		 z0 = svdup_u16_x (p0, x0))