gcc-13.2.0/gcc/testsuite/gcc.target/aarch64/sve/acle/asm/dup_f64.c
/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

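/* Each function below is preceded by a comment between "**" markers.
   check-function-bodies treats those lines as regular expressions and
   matches them, line by line, against the assembly generated for the
   function of the same name; "..." matches any run of instructions
   and \1 refers back to a capture group such as (x[0-9]+).  */
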
/*
** dup_1_f64:
**	fmov	z0\.d, #1\.0(?:e\+0)?
**	ret
*/
TEST_UNIFORM_Z (dup_1_f64, svfloat64_t,
		z0 = svdup_n_f64 (1),
		z0 = svdup_f64 (1))

/*
** dup_0_f64:
**	mov	z0\.d, #0
**	ret
*/
TEST_UNIFORM_Z (dup_0_f64, svfloat64_t,
		z0 = svdup_n_f64 (0),
		z0 = svdup_f64 (0))

/*
** dup_8_f64:
**	fmov	z0\.d, #8\.0(?:e\+0)?
**	ret
*/
TEST_UNIFORM_Z (dup_8_f64, svfloat64_t,
		z0 = svdup_n_f64 (8),
		z0 = svdup_f64 (8))

/*
** dup_512_f64:
**	mov	(x[0-9]+), 4647714815446351872
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_512_f64, svfloat64_t,
		z0 = svdup_n_f64 (512),
		z0 = svdup_f64 (512))

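/* 512.0 is outside the range of the SVE FMOV immediate encoding
   (+/-n/16 * 2^r with n in 16..31 and r in -3..4, i.e. magnitudes
   from 0.125 to 31.0), but its bit pattern 0x4080000000000000
   (4647714815446351872) has only one non-zero 16-bit chunk, so it can
   be built with a single MOV into a general register and broadcast
   from there.  */
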
/*
** dup_513_f64:
**	...
**	ld1rd	z0\.d, p[0-7]/z, \[x[0-9]+\]
**	ret
*/
TEST_UNIFORM_Z (dup_513_f64, svfloat64_t,
		z0 = svdup_n_f64 (513),
		z0 = svdup_f64 (513))

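/* 513.0 (bit pattern 0x4080080000000000) has two non-zero 16-bit
   chunks, so materialising it in a general register would take a
   MOVZ/MOVK pair; the compiler instead keeps the constant in memory
   and broadcasts it with a load-and-replicate (LD1RD).  */
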
/*
** dup_d4_f64:
**	mov	z0\.d, d4
**	ret
*/
TEST_UNIFORM_ZD (dup_d4_f64, svfloat64_t, double,
		z0 = svdup_n_f64 (d4),
		z0 = svdup_f64 (d4))

/*
** dup_1_f64_m:
**	fmov	z0\.d, p0/m, #1\.0(?:e\+0)?
**	ret
*/
TEST_UNIFORM_Z (dup_1_f64_m, svfloat64_t,
		z0 = svdup_n_f64_m (z0, p0, 1),
		z0 = svdup_f64_m (z0, p0, 1))

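/* The _m forms merge: lanes where p0 is false keep the value of the
   first (inactive) argument.  When that argument is already z0, a
   single predicated move with /m does the whole job.  */
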
/*
** dup_0_f64_m:
**	mov	z0\.d, p0/m, #0
**	ret
*/
TEST_UNIFORM_Z (dup_0_f64_m, svfloat64_t,
		z0 = svdup_n_f64_m (z0, p0, 0),
		z0 = svdup_f64_m (z0, p0, 0))

/*
** dup_8_f64_m:
**	fmov	z0\.d, p0/m, #8\.0(?:e\+0)?
**	ret
*/
TEST_UNIFORM_Z (dup_8_f64_m, svfloat64_t,
		z0 = svdup_n_f64_m (z0, p0, 8),
		z0 = svdup_f64_m (z0, p0, 8))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_512_f64_m, svfloat64_t,
		z0 = svdup_n_f64_m (z0, p0, 512),
		z0 = svdup_f64_m (z0, p0, 512))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_513_f64_m, svfloat64_t,
		z0 = svdup_n_f64_m (z0, p0, 513),
		z0 = svdup_f64_m (z0, p0, 513))

/*
** dup_d4_f64_m:
**	movprfx	z0, z1
**	mov	z0\.d, p0/m, d4
**	ret
*/
TEST_UNIFORM_ZD (dup_d4_f64_m, svfloat64_t, double,
		z0 = svdup_n_f64_m (z1, p0, d4),
		z0 = svdup_f64_m (z1, p0, d4))

/*
** dup_1_f64_z:
**	movprfx	z0\.d, p0/z, z0\.d
**	fmov	z0\.d, p0/m, #1\.0(?:e\+0)?
**	ret
*/
TEST_UNIFORM_Z (dup_1_f64_z, svfloat64_t,
		z0 = svdup_n_f64_z (p0, 1),
		z0 = svdup_f64_z (p0, 1))

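/* The _z forms zero the inactive lanes, so the predicated move is
   preceded by a zeroing MOVPRFX (p0/z) that clears the lanes the
   /m move will not write.  */
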
/*
** dup_0_f64_z:
**	mov	z0\.[bhsd], #0
**	ret
*/
TEST_UNIFORM_Z (dup_0_f64_z, svfloat64_t,
		z0 = svdup_n_f64_z (p0, 0),
		z0 = svdup_f64_z (p0, 0))

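/* Duplicating 0.0 with zeroing predication zeroes every lane either
   way, so an unpredicated zero of the whole register is enough and
   any element size ([bhsd]) is acceptable.  */
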
/*
** dup_8_f64_z:
**	movprfx	z0\.d, p0/z, z0\.d
**	fmov	z0\.d, p0/m, #8\.0(?:e\+0)?
**	ret
*/
TEST_UNIFORM_Z (dup_8_f64_z, svfloat64_t,
		z0 = svdup_n_f64_z (p0, 8),
		z0 = svdup_f64_z (p0, 8))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_512_f64_z, svfloat64_t,
		z0 = svdup_n_f64_z (p0, 512),
		z0 = svdup_f64_z (p0, 512))

/* TODO: Bad code and needs fixing.  */
TEST_UNIFORM_Z (dup_513_f64_z, svfloat64_t,
		z0 = svdup_n_f64_z (p0, 513),
		z0 = svdup_f64_z (p0, 513))

/*
** dup_d4_f64_z:
**	movprfx	z0\.d, p0/z, z0\.d
**	mov	z0\.d, p0/m, d4
**	ret
*/
TEST_UNIFORM_ZD (dup_d4_f64_z, svfloat64_t, double,
		z0 = svdup_n_f64_z (p0, d4),
		z0 = svdup_f64_z (p0, d4))

/*
** dup_1_f64_x:
**	fmov	z0\.d, #1\.0(?:e\+0)?
**	ret
*/
TEST_UNIFORM_Z (dup_1_f64_x, svfloat64_t,
		z0 = svdup_n_f64_x (p0, 1),
		z0 = svdup_f64_x (p0, 1))

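/* The _x forms leave inactive lanes in an undefined state, so the
   compiler is free to ignore the predicate and emit the same
   unpredicated code as the unsuffixed forms above.  */
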
/*
** dup_0_f64_x:
**	mov	z0\.d, #0
**	ret
*/
TEST_UNIFORM_Z (dup_0_f64_x, svfloat64_t,
		z0 = svdup_n_f64_x (p0, 0),
		z0 = svdup_f64_x (p0, 0))

/*
** dup_8_f64_x:
**	fmov	z0\.d, #8\.0(?:e\+0)?
**	ret
*/
TEST_UNIFORM_Z (dup_8_f64_x, svfloat64_t,
		z0 = svdup_n_f64_x (p0, 8),
		z0 = svdup_f64_x (p0, 8))

/*
** dup_512_f64_x:
**	mov	(x[0-9]+), 4647714815446351872
**	mov	z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (dup_512_f64_x, svfloat64_t,
		z0 = svdup_n_f64_x (p0, 512),
		z0 = svdup_f64_x (p0, 512))

/*
** dup_513_f64_x:
**	...
**	ld1rd	z0\.d, p[0-7]/z, \[x[0-9]+\]
**	ret
*/
TEST_UNIFORM_Z (dup_513_f64_x, svfloat64_t,
		z0 = svdup_n_f64_x (p0, 513),
		z0 = svdup_f64_x (p0, 513))

/*
** dup_d4_f64_x:
**	mov	z0\.d, d4
**	ret
*/
TEST_UNIFORM_ZD (dup_d4_f64_x, svfloat64_t, double,
		z0 = svdup_n_f64_x (p0, d4),
		z0 = svdup_f64_x (p0, d4))
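
/* For reference, a minimal sketch of what TEST_UNIFORM_Z roughly
   expands to; the real definition lives in test_sve_acle.h, and the
   exact prototype and attributes used there are an assumption here:

     svfloat64_t
     dup_1_f64 (svfloat64_t z0, svbool_t p0)
     {
       z0 = svdup_n_f64 (1);   // or the overloaded svdup_f64 (1) in the
       return z0;              // second compilation mode
     }

   The parameters are named after the registers the vector PCS assigns
   them to (z0, p0), which is what lets the "**" patterns above refer
   to registers directly.  */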