/* gcc-13.2.0/libgcc/config/csky/linux-atomic.c  */
/* Linux-specific atomic operations for C-SKY.
   Copyright (C) 2018-2023 Free Software Foundation, Inc.
   Contributed by C-SKY Microsystems and Mentor Graphics.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */

/* Kernel helper for compare-and-exchange.  */
inline int
__kernel_cmpxchg (int oldval, int newval, volatile int *ptr)
{
  register int _a0 asm ("a0") = oldval;
  register int _a1 asm ("a1") = newval;
  register volatile int *_a2 asm ("a2") = ptr;
  __asm__ __volatile__ ("trap  2\n"
			: "+r" (_a0)
			: "r" (_a1), "r" (_a2)
			: "a3", "memory");
  return _a0;
}
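
/* Note: as used throughout this file, the helper is expected to return
   zero when *PTR still contained OLDVAL and was atomically replaced by
   NEWVAL, and nonzero otherwise; every retry loop below relies on that
   convention.  */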

/* Kernel helper for memory barrier.  */
inline void __kernel_dmb (void)
{
  asm ("sync" : : : "memory");
}

/* Note: we implement byte, short and int versions of atomic operations using
   the above kernel helpers, but there is no support for "long long" (64-bit)
   operations as yet.  */

#define HIDDEN __attribute__ ((visibility ("hidden")))

#ifdef __CSKYLE__
#define INVERT_MASK_1 0
#define INVERT_MASK_2 0
#else
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16
#endif

#define MASK_1 0xffu
#define MASK_2 0xffffu

#define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP)				\
  int HIDDEN								\
  __sync_fetch_and_##OP##_4 (int *ptr, int val)				\
  {									\
    int failure, tmp;							\
									\
    do									\
      {									\
	tmp = *ptr;							\
	failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr);	\
      }									\
    while (failure != 0);						\
									\
    return tmp;								\
  }

FETCH_AND_OP_WORD (add,   , +)
FETCH_AND_OP_WORD (sub,   , -)
FETCH_AND_OP_WORD (or,    , |)
FETCH_AND_OP_WORD (and,   , &)
FETCH_AND_OP_WORD (xor,   , ^)
FETCH_AND_OP_WORD (nand, ~, &)
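
/* For illustration (not compiled): the first invocation above expands to
   roughly the following function, which atomically performs *ptr += val
   and returns the value *ptr held beforehand; the nand variant stores
   ~(tmp & val) instead.

     int __sync_fetch_and_add_4 (int *ptr, int val)
     {
       int failure, tmp;

       do
	 {
	   tmp = *ptr;
	   failure = __kernel_cmpxchg (tmp, (tmp + val), ptr);
	 }
       while (failure != 0);

       return tmp;
     }
*/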

#define NAME_oldval(OP, WIDTH) __sync_fetch_and_##OP##_##WIDTH
#define NAME_newval(OP, WIDTH) __sync_##OP##_and_fetch_##WIDTH

/* Implement both __sync_<op>_and_fetch and __sync_fetch_and_<op> for
   subword-sized quantities.  */

#define SUBWORD_SYNC_OP(OP, PFX_OP, INF_OP, TYPE, WIDTH, RETURN)	\
  TYPE HIDDEN								\
  NAME##_##RETURN (OP, WIDTH) (TYPE *ptr, TYPE val)			\
  {									\
    int *wordptr = (int *) ((unsigned int) ptr & ~3);			\
    unsigned int mask, shift, oldval, newval;				\
    int failure;							\
									\
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;	\
    mask = MASK_##WIDTH << shift;					\
									\
    do									\
      {									\
	oldval = *wordptr;						\
	newval = ((PFX_OP (((oldval & mask) >> shift)			\
		   INF_OP (unsigned int) val)) << shift) & mask;	\
	newval |= oldval & ~mask;					\
	failure = __kernel_cmpxchg (oldval, newval, wordptr);		\
      }									\
    while (failure != 0);						\
									\
    return (RETURN & mask) >> shift;					\
  }
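
/* Worked example of the shift/mask arithmetic above (illustrative only):
   for a byte whose address satisfies (ptr & 3) == 2, the byte offset
   within its aligned word is 2, so (2 << 3) == 16.  On a little-endian
   target (INVERT_MASK_1 == 0) this gives shift == 16 and
   mask == 0x00ff0000; on a big-endian target (INVERT_MASK_1 == 24) it
   gives shift == 16 ^ 24 == 8 and mask == 0x0000ff00, i.e. in each case
   the bits that byte actually occupies within the containing 32-bit
   word.  */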

SUBWORD_SYNC_OP (add,   , +, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (sub,   , -, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (or,    , |, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (and,   , &, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, oldval)

SUBWORD_SYNC_OP (add,   , +, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (sub,   , -, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (or,    , |, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (and,   , &, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, oldval)

#define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP)				\
  int HIDDEN								\
  __sync_##OP##_and_fetch_4 (int *ptr, int val)				\
  {									\
    int tmp, failure;							\
									\
    do									\
      {									\
	tmp = *ptr;							\
	failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr);	\
      }									\
    while (failure != 0);						\
									\
    return PFX_OP (tmp INF_OP val);					\
  }

OP_AND_FETCH_WORD (add,   , +)
OP_AND_FETCH_WORD (sub,   , -)
OP_AND_FETCH_WORD (or,    , |)
OP_AND_FETCH_WORD (and,   , &)
OP_AND_FETCH_WORD (xor,   , ^)
OP_AND_FETCH_WORD (nand, ~, &)

SUBWORD_SYNC_OP (add,   , +, unsigned short, 2, newval)
SUBWORD_SYNC_OP (sub,   , -, unsigned short, 2, newval)
SUBWORD_SYNC_OP (or,    , |, unsigned short, 2, newval)
SUBWORD_SYNC_OP (and,   , &, unsigned short, 2, newval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned short, 2, newval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, newval)

SUBWORD_SYNC_OP (add,   , +, unsigned char, 1, newval)
SUBWORD_SYNC_OP (sub,   , -, unsigned char, 1, newval)
SUBWORD_SYNC_OP (or,    , |, unsigned char, 1, newval)
SUBWORD_SYNC_OP (and,   , &, unsigned char, 1, newval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned char, 1, newval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, newval)

int HIDDEN
__sync_val_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int actual_oldval, fail;

  while (1)
    {
      actual_oldval = *ptr;

      if (oldval != actual_oldval)
	return actual_oldval;

      fail = __kernel_cmpxchg (actual_oldval, newval, ptr);

      if (!fail)
	return oldval;
    }
}
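
/* Illustrative use of the primitive above (the helper below is
   hypothetical and not part of this file): a lock-free increment that
   retries until the compare-and-swap observes an unchanged value.

     static int
     atomic_increment (int *p)
     {
       int old;

       do
	 old = *p;
       while (__sync_val_compare_and_swap_4 (p, old, old + 1) != old);

       return old + 1;
     }
*/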

#define SUBWORD_VAL_CAS(TYPE, WIDTH)					\
  TYPE HIDDEN								\
  __sync_val_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval,		\
				       TYPE newval)			\
  {									\
    int *wordptr = (int *) ((unsigned int) ptr & ~3), fail;		\
    unsigned int mask, shift, actual_oldval, actual_newval;		\
									\
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;	\
    mask = MASK_##WIDTH << shift;					\
									\
    while (1)								\
      {									\
	actual_oldval = *wordptr;					\
									\
	if (((actual_oldval & mask) >> shift) != (unsigned int) oldval)	\
	  return (actual_oldval & mask) >> shift;			\
									\
	actual_newval = (actual_oldval & ~mask)				\
			| (((unsigned int) newval << shift) & mask);	\
									\
	fail = __kernel_cmpxchg (actual_oldval, actual_newval,		\
				 wordptr);				\
									\
	if (!fail)							\
	  return oldval;						\
      }									\
  }

SUBWORD_VAL_CAS (unsigned short, 2)
SUBWORD_VAL_CAS (unsigned char,  1)

typedef unsigned char bool;

bool HIDDEN
__sync_bool_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int failure = __kernel_cmpxchg (oldval, newval, ptr);
  return (failure == 0);
}

#define SUBWORD_BOOL_CAS(TYPE, WIDTH)					\
  bool HIDDEN								\
  __sync_bool_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval,	\
					TYPE newval)			\
  {									\
    TYPE actual_oldval							\
      = __sync_val_compare_and_swap_##WIDTH (ptr, oldval, newval);	\
    return (oldval == actual_oldval);					\
  }

SUBWORD_BOOL_CAS (unsigned short, 2)
SUBWORD_BOOL_CAS (unsigned char, 1)

void HIDDEN
__sync_synchronize (void)
{
  __kernel_dmb ();
}

int HIDDEN
__sync_lock_test_and_set_4 (int *ptr, int val)
{
  int failure, oldval;

  do
    {
      oldval = *ptr;
      failure = __kernel_cmpxchg (oldval, val, ptr);
    }
  while (failure != 0);

  return oldval;
}

#define SUBWORD_TEST_AND_SET(TYPE, WIDTH)				\
  TYPE HIDDEN								\
  __sync_lock_test_and_set_##WIDTH (TYPE *ptr, TYPE val)		\
  {									\
    int failure;							\
    unsigned int oldval, newval, shift, mask;				\
    int *wordptr = (int *) ((unsigned int) ptr & ~3);			\
									\
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;	\
    mask = MASK_##WIDTH << shift;					\
									\
    do									\
      {									\
	oldval = *wordptr;						\
	newval = ((oldval & ~mask)					\
		  | (((unsigned int) val << shift) & mask));		\
	failure = __kernel_cmpxchg (oldval, newval, wordptr);		\
      }									\
    while (failure != 0);						\
									\
    return (oldval & mask) >> shift;					\
  }

SUBWORD_TEST_AND_SET (unsigned short, 2)
SUBWORD_TEST_AND_SET (unsigned char,  1)

#define SYNC_LOCK_RELEASE(TYPE, WIDTH)					\
  void HIDDEN								\
  __sync_lock_release_##WIDTH (TYPE *ptr)				\
  {									\
    /* All writes before this point must be seen before we release	\
       the lock itself.  */						\
    __kernel_dmb ();							\
    *ptr = 0;								\
  }

SYNC_LOCK_RELEASE (int,   4)
SYNC_LOCK_RELEASE (short, 2)
SYNC_LOCK_RELEASE (char,  1)
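
/* Illustrative use of the lock primitives above (the lock variable and
   the two functions below are hypothetical, not part of this file):
   a minimal spinlock.  __sync_lock_test_and_set_4 returns the previous
   contents of the lock word, so the acquire loop spins until it sees 0,
   and __sync_lock_release_4 issues a barrier before storing zero.

     static int lock_word;

     static void
     spin_lock (void)
     {
       while (__sync_lock_test_and_set_4 (&lock_word, 1) != 0)
	 ;
     }

     static void
     spin_unlock (void)
     {
       __sync_lock_release_4 (&lock_word);
     }
*/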