gcc-13.2.0/libgcc/config/aarch64/sync-cache.c
/* Machine description for AArch64 architecture.
   Copyright (C) 2012-2023 Free Software Foundation, Inc.
   Contributed by ARM Ltd.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */

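/* Bit positions of the IDC and DIC fields in CTR_EL0.  When set, they
   indicate that data cache cleaning (IDC) or instruction cache
   invalidation (DIC) to the Point of Unification is not required for
   instruction/data coherence.  */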
#define CTR_IDC_SHIFT           28
#define CTR_DIC_SHIFT           29

void __aarch64_sync_cache_range (const void *, const void *);

void
__aarch64_sync_cache_range (const void *base, const void *end)
{
  unsigned icache_lsize;
  unsigned dcache_lsize;
  static unsigned int cache_info = 0;
  const char *address;

  if (! cache_info)
    /* CTR_EL0 [3:0] contains log2 of icache line size in words.
       CTR_EL0 [19:16] contains log2 of dcache line size in words.  */
    asm volatile ("mrs\t%0, ctr_el0":"=r" (cache_info));

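  /* The CTR_EL0 fields give log2 of the line size in 4-byte words, so the
     size in bytes is 4 << field; e.g. a field value of 4 means 64-byte
     lines.  */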
  icache_lsize = 4 << (cache_info & 0xF);
  dcache_lsize = 4 << ((cache_info >> 16) & 0xF);

  /* If CTR_EL0.IDC is enabled, Data cache clean to the Point of Unification is
     not required for instruction to data coherence.  */

  if (((cache_info >> CTR_IDC_SHIFT) & 0x1) == 0x0) {
    /* Loop over the address range, cleaning one cache line at a time.
       The data cache must be flushed to the Point of Unification first
       so that the instruction cache fetches the updated data.  'end' is
       exclusive, as per the GNU definition of __clear_cache.  */

    /* Make the start address of the loop cache aligned.  */
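    /* dcache_lsize is a power of two, so the mask below rounds 'base'
       down to a data cache line boundary.  */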
    address = (const char*) ((__UINTPTR_TYPE__) base
			     & ~ (__UINTPTR_TYPE__) (dcache_lsize - 1));

    for (; address < (const char *) end; address += dcache_lsize)
      asm volatile ("dc\tcvau, %0"
		    :
		    : "r" (address)
		    : "memory");
  }

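  /* Ensure the cache maintenance above (or, when IDC is set, the caller's
     stores) has reached the Point of Unification before the instruction
     cache is touched.  */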
  asm volatile ("dsb\tish" : : : "memory");

  /* If CTR_EL0.DIC is enabled, Instruction cache invalidation to the Point of
     Unification is not required for instruction to data coherence.  */

  if (((cache_info >> CTR_DIC_SHIFT) & 0x1) == 0x0) {
    /* Make the start address of the loop cache aligned.  */
    address = (const char*) ((__UINTPTR_TYPE__) base
			     & ~ (__UINTPTR_TYPE__) (icache_lsize - 1));

    for (; address < (const char *) end; address += icache_lsize)
      asm volatile ("ic\tivau, %0"
		    :
		    : "r" (address)
		    : "memory");

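    /* Wait for the instruction cache invalidations to complete across the
       inner shareable domain before continuing.  */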
    asm volatile ("dsb\tish" : : : "memory");
  }

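  /* Flush this PE's pipeline so that subsequent instructions are refetched
     from the now-synchronized instruction stream.  */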
  asm volatile("isb" : : : "memory");
}
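
Usage note: this helper implements the cache synchronization behind GCC's
__clear_cache support on AArch64, so the typical caller is code that writes
instructions into memory at run time and flushes them before jumping there.
The sketch below is a minimal, hypothetical illustration of that pattern;
it assumes __builtin___clear_cache ultimately reaches this routine via
libgcc, assumes an mmap that permits a writable and executable mapping, and
the buffer name and hand-encoded instructions are illustrative, not part of
libgcc.

/* Hypothetical caller, for illustration only: emit "mov x0, #42; ret"
   into an executable buffer, synchronize the caches, then call it.  */
#include <string.h>
#include <sys/mman.h>

int
main (void)
{
  static const unsigned int insns[] = { 0xd2800540,   /* mov x0, #42 */
					0xd65f03c0 }; /* ret */

  void *buf = mmap (0, sizeof insns, PROT_READ | PROT_WRITE | PROT_EXEC,
		    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (buf == MAP_FAILED)
    return 1;

  memcpy (buf, insns, sizeof insns);

  /* Expected to reach __aarch64_sync_cache_range via libgcc's
     __clear_cache; 'end' is exclusive, as documented above.  */
  __builtin___clear_cache ((char *) buf, (char *) buf + sizeof insns);

  long (*fn) (void) = (long (*) (void)) buf;
  return fn () == 42 ? 0 : 1;
}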