1  /* Classes for modeling the state of memory.
       2     Copyright (C) 2020-2023 Free Software Foundation, Inc.
       3     Contributed by David Malcolm <dmalcolm@redhat.com>.
       4  
       5  This file is part of GCC.
       6  
       7  GCC is free software; you can redistribute it and/or modify it
       8  under the terms of the GNU General Public License as published by
       9  the Free Software Foundation; either version 3, or (at your option)
      10  any later version.
      11  
      12  GCC is distributed in the hope that it will be useful, but
      13  WITHOUT ANY WARRANTY; without even the implied warranty of
      14  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
      15  General Public License for more details.
      16  
      17  You should have received a copy of the GNU General Public License
      18  along with GCC; see the file COPYING3.  If not see
      19  <http://www.gnu.org/licenses/>.  */
      20  
      21  #ifndef GCC_ANALYZER_STORE_H
      22  #define GCC_ANALYZER_STORE_H
      23  
      24  /* Implementation of the region-based ternary model described in:
      25       "A Memory Model for Static Analysis of C Programs"
      26        (Zhongxing Xu, Ted Kremenek, and Jian Zhang)
      27       http://lcs.ios.ac.cn/~xuzb/canalyze/memmodel.pdf  */
      28  
      29  /* The store models memory as a collection of "clusters", where regions
      30     are partitioned into clusters via their base region.
      31  
      32     For example, given:
      33       int a, b, c;
      34       struct coord { double x; double y; } verts[3];
      35     then "verts[0].y" and "verts[1].x" both have "verts" as their base region.
      36     Each of a, b, c, and verts will have their own clusters, so that we
      37     know that writes to e.g. "verts[1].x".don't affect e.g. "a".
      38  
      39     Within each cluster we store a map of bindings to values, where the
      40     binding keys can be either concrete or symbolic.
      41  
      42     Concrete bindings affect a specific range of bits relative to the start
      43     of the base region of the cluster, whereas symbolic bindings affect
      44     a specific subregion within the cluster.
      45  
      46     Consider (from the symbolic-1.c testcase):
      47  
      48       char arr[1024];
      49       arr[2] = a;  (1)
      50       arr[3] = b;  (2)
      51         After (1) and (2), the cluster for "arr" has concrete bindings
      52         for bits 16-23 and for bits 24-31, with svalues "INIT_VAL(a)"
      53         and "INIT_VAL(b)" respectively:
      54         cluster: {bits 16-23: "INIT_VAL(a)",
      55                   bits 24-31: "INIT_VAL(b)";
      56                   flags: {}}
      57         Attempting to query unbound subregions e.g. arr[4] will
      58         return "UNINITIALIZED".
      59         "a" and "b" are each in their own clusters, with no explicit
      60         bindings, and thus implicitly have value INIT_VAL(a) and INIT_VAL(b).
      61  
      62       arr[3] = c;  (3)
      63         After (3), the concrete binding for bits 24-31 is replaced with the
      64         svalue "INIT_VAL(c)":
      65         cluster: {bits 16-23: "INIT_VAL(a)",  (from before)
      66                   bits 24-31: "INIT_VAL(c)";  (updated)
      67                   flags: {}}
      68  
      69       arr[i] = d;  (4)
      70         After (4), we lose the concrete bindings and replace them with a
      71         symbolic binding for "arr[i]", with svalue "INIT_VAL(d)".  We also
      72         mark the cluster as having been "symbolically touched": future
      73         attempts to query the values of subregions other than "arr[i]",
      74         such as "arr[3]" are "UNKNOWN", since we don't know if the write
      75         to arr[i] affected them.
      76         cluster: {symbolic_key(arr[i]): "INIT_VAL(d)";
      77                   flags: {TOUCHED}}
      78  
      79       arr[j] = e;  (5)
      80         After (5), we lose the symbolic binding for "arr[i]" since we could
      81         have overwritten it, and add a symbolic binding for "arr[j]".
      82         cluster: {symbolic_key(arr[j]): "INIT_VAL(d)"; (different symbolic
      83                   flags: {TOUCHED}}                     binding)
      84  
      85       arr[3] = f;  (6)
      86         After (6), we lose the symbolic binding for "arr[j]" since we could
      87         have overwritten it, and gain a concrete binding for bits 24-31
      88         again, this time with svalue "INIT_VAL(e)":
      89         cluster: {bits 24-31: "INIT_VAL(d)";
      90                   flags: {TOUCHED}}
      91         The cluster is still flagged as touched, so that we know that
      92         accesses to other elements are "UNKNOWN" rather than
      93         "UNINITIALIZED".
      94  
      95     Handling symbolic regions requires us to handle aliasing.
      96  
      97     In the first example above, each of a, b, c and verts are non-symbolic
      98     base regions and so their clusters are "concrete clusters", whereas given:
      99         struct coord *p, *q;
     100     then "*p" and "*q" are symbolic base regions, and thus "*p" and "*q"
     101     have "symbolic clusters".
     102  
     103     In the above, "verts[i].x" will have a symbolic *binding* within a
     104     concrete cluster for "verts", whereas "*p" is a symbolic *cluster*.
     105  
     106     Writes to concrete clusters can't affect other concrete clusters,
     107     but can affect symbolic clusters; e.g. after:
     108         verts[0].x = 42;
     109     we bind 42 in the cluster for "verts", but the clusters for "b" and "c"
     110     can't be affected.  Any symbolic clusters for *p and for *q can be
     111     affected, *p and *q could alias verts.
     112  
     113     Writes to a symbolic cluster can affect other clusters, both
     114     concrete and symbolic; e.g. after:
     115         p->x = 17;
     116     we bind 17 within the cluster for "*p".  The concrete clusters for a, b,
     117     c, and verts could be affected, depending on whether *p aliases them.
     118     Similarly, the symbolic cluster to *q could be affected.  */
     119  
     120  namespace ana {
     121  
     122  /* A class for keeping track of aspects of a program_state that we don't
     123     know about, to avoid false positives about leaks.
     124  
     125     Consider:
     126  
     127        p->field = malloc (1024);
     128        q->field = NULL;
     129  
     130     where we don't know whether or not p and q point to the same memory,
     131     and:
     132  
     133        p->field = malloc (1024);
     134        unknown_fn (p);
     135  
     136     In both cases, the svalue for the address of the allocated buffer
     137     goes from being bound to p->field to not having anything explicitly bound
     138     to it.
     139  
     140     Given that we conservatively discard bindings due to possible aliasing or
     141     calls to unknown function, the store loses references to svalues,
     142     but these svalues could still be live.  We don't want to warn about
     143     them leaking - they're effectively in a "maybe live" state.
     144  
     145     This "maybe live" information is somewhat transient.
     146  
     147     We don't want to store this "maybe live" information in the program_state,
     148     region_model, or store, since we don't want to bloat these objects (and
     149     potentially bloat the exploded_graph with more nodes).
     150     However, we can't store it in the region_model_context, as these context
     151     objects sometimes don't last long enough to be around when comparing the
     152     old vs the new state.
     153  
     154     This class is a way to track a set of such svalues, so that we can
     155     temporarily capture that they are in a "maybe live" state whilst
     156     comparing old and new states.  */
     157  
     158  class uncertainty_t
     159  {
     160  public:
     161    typedef hash_set<const svalue *>::iterator iterator;
     162  
     163    void on_maybe_bound_sval (const svalue *sval)
     164    {
     165      m_maybe_bound_svals.add (sval);
     166    }
     167    void on_mutable_sval_at_unknown_call (const svalue *sval)
     168    {
     169      m_mutable_at_unknown_call_svals.add (sval);
     170    }
     171  
     172    bool unknown_sm_state_p (const svalue *sval)
     173    {
     174      return (m_maybe_bound_svals.contains (sval)
     175  	    || m_mutable_at_unknown_call_svals.contains (sval));
     176    }
     177  
     178    void dump_to_pp (pretty_printer *pp, bool simple) const;
     179    void dump (bool simple) const;
     180  
     181    iterator begin_maybe_bound_svals () const
     182    {
     183      return m_maybe_bound_svals.begin ();
     184    }
     185    iterator end_maybe_bound_svals () const
     186    {
     187      return m_maybe_bound_svals.end ();
     188    }
     189  
     190  private:
     191  
     192    /* svalues that might or might not still be bound.  */
     193    hash_set<const svalue *> m_maybe_bound_svals;
     194  
     195    /* svalues that have mutable sm-state at unknown calls.  */
     196    hash_set<const svalue *> m_mutable_at_unknown_call_svals;
     197  };
     198  
     199  class byte_range;
     200  class concrete_binding;
     201  class symbolic_binding;
     202  
     203  /* Abstract base class for describing ranges of bits within a binding_map
     204     that can have svalues bound to them.  */
     205  
     206  class binding_key
     207  {
     208  public:
     209    virtual ~binding_key () {}
     210    virtual bool concrete_p () const = 0;
     211    bool symbolic_p () const { return !concrete_p (); }
     212  
     213    static const binding_key *make (store_manager *mgr, const region *r);
     214  
     215    virtual void dump_to_pp (pretty_printer *pp, bool simple) const = 0;
     216    void dump (bool simple) const;
     217    label_text get_desc (bool simple=true) const;
     218  
     219    static int cmp_ptrs (const void *, const void *);
     220    static int cmp (const binding_key *, const binding_key *);
     221  
     222    virtual const concrete_binding *dyn_cast_concrete_binding () const
     223    { return NULL; }
     224    virtual const symbolic_binding *dyn_cast_symbolic_binding () const
     225    { return NULL; }
     226  };
     227  
     228  /* A concrete range of bits.  */
     229  
     230  struct bit_range
     231  {
     232    bit_range (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
     233    : m_start_bit_offset (start_bit_offset),
     234      m_size_in_bits (size_in_bits)
     235    {}
     236  
     237    void dump_to_pp (pretty_printer *pp) const;
     238    void dump () const;
     239  
     240    bool empty_p () const
     241    {
     242      return m_size_in_bits == 0;
     243    }
     244  
     245    bit_offset_t get_start_bit_offset () const
     246    {
     247      return m_start_bit_offset;
     248    }
     249    bit_offset_t get_next_bit_offset () const
     250    {
     251      return m_start_bit_offset + m_size_in_bits;
     252    }
     253    bit_offset_t get_last_bit_offset () const
     254    {
     255      gcc_assert (!empty_p ());
     256      return get_next_bit_offset () - 1;
     257    }
     258  
     259    bool contains_p (bit_offset_t offset) const
     260    {
     261      return (offset >= get_start_bit_offset ()
     262  	    && offset < get_next_bit_offset ());
     263    }
     264  
     265    bool contains_p (const bit_range &other, bit_range *out) const;
     266  
     267    bool operator== (const bit_range &other) const
     268    {
     269      return (m_start_bit_offset == other.m_start_bit_offset
     270  	    && m_size_in_bits == other.m_size_in_bits);
     271    }
     272  
     273    bool intersects_p (const bit_range &other) const
     274    {
     275      return (get_start_bit_offset () < other.get_next_bit_offset ()
     276  	    && other.get_start_bit_offset () < get_next_bit_offset ());
     277    }
     278    bool intersects_p (const bit_range &other,
     279  		     bit_range *out_this,
     280  		     bit_range *out_other) const;
     281  
     282    static int cmp (const bit_range &br1, const bit_range &br2);
     283  
     284    bit_range operator- (bit_offset_t offset) const;
     285  
     286    static bool from_mask (unsigned HOST_WIDE_INT mask, bit_range *out);
     287  
     288    bool as_byte_range (byte_range *out) const;
     289  
     290    bit_offset_t m_start_bit_offset;
     291    bit_size_t m_size_in_bits;
     292  };
     293  
     294  /* A concrete range of bytes.  */
     295  
     296  struct byte_range
     297  {
     298    byte_range (byte_offset_t start_byte_offset, byte_size_t size_in_bytes)
     299    : m_start_byte_offset (start_byte_offset),
     300      m_size_in_bytes (size_in_bytes)
     301    {}
     302  
     303    void dump_to_pp (pretty_printer *pp) const;
     304    void dump () const;
     305  
     306    bool empty_p () const
     307    {
     308      return m_size_in_bytes == 0;
     309    }
     310  
     311    bool contains_p (byte_offset_t offset) const
     312    {
     313      return (offset >= get_start_byte_offset ()
     314  	    && offset < get_next_byte_offset ());
     315    }
     316    bool contains_p (const byte_range &other, byte_range *out) const;
     317  
     318    bool operator== (const byte_range &other) const
     319    {
     320      return (m_start_byte_offset == other.m_start_byte_offset
     321  	    && m_size_in_bytes == other.m_size_in_bytes);
     322    }
     323  
     324    bool intersects_p (const byte_range &other,
     325  		     byte_size_t *out_num_overlap_bytes) const;
     326  
     327    bool exceeds_p (const byte_range &other,
     328  		  byte_range *out_overhanging_byte_range) const;
     329  
     330    bool falls_short_of_p (byte_offset_t offset,
     331  			 byte_range *out_fall_short_bytes) const;
     332  
     333    byte_offset_t get_start_byte_offset () const
     334    {
     335      return m_start_byte_offset;
     336    }
     337    byte_offset_t get_next_byte_offset () const
     338    {
     339      return m_start_byte_offset + m_size_in_bytes;
     340    }
     341    byte_offset_t get_last_byte_offset () const
     342    {
     343      gcc_assert (!empty_p ());
     344      return m_start_byte_offset + m_size_in_bytes - 1;
     345    }
     346  
     347    bit_range as_bit_range () const
     348    {
     349      return bit_range (m_start_byte_offset * BITS_PER_UNIT,
     350  		      m_size_in_bytes * BITS_PER_UNIT);
     351    }
     352  
     353    static int cmp (const byte_range &br1, const byte_range &br2);
     354  
     355    byte_offset_t m_start_byte_offset;
     356    byte_size_t m_size_in_bytes;
     357  };
     358  
     359  /* Concrete subclass of binding_key, for describing a non-empty
     360     concrete range of bits within the binding_map (e.g. "bits 8-15").  */
     361  
     362  class concrete_binding : public binding_key
     363  {
     364  public:
     365    /* This class is its own key for the purposes of consolidation.  */
     366    typedef concrete_binding key_t;
     367  
     368    concrete_binding (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
     369    : m_bit_range (start_bit_offset, size_in_bits)
     370    {
     371      gcc_assert (!m_bit_range.empty_p ());
     372    }
     373    bool concrete_p () const final override { return true; }
     374  
     375    hashval_t hash () const
     376    {
     377      inchash::hash hstate;
     378      hstate.add_wide_int (m_bit_range.m_start_bit_offset);
     379      hstate.add_wide_int (m_bit_range.m_size_in_bits);
     380      return hstate.end ();
     381    }
     382    bool operator== (const concrete_binding &other) const
     383    {
     384      return m_bit_range == other.m_bit_range;
     385    }
     386  
     387    void dump_to_pp (pretty_printer *pp, bool simple) const final override;
     388  
     389    const concrete_binding *dyn_cast_concrete_binding () const final override
     390    { return this; }
     391  
     392    const bit_range &get_bit_range () const { return m_bit_range; }
     393  
     394    bit_offset_t get_start_bit_offset () const
     395    {
     396      return m_bit_range.m_start_bit_offset;
     397    }
     398    bit_size_t get_size_in_bits () const
     399    {
     400      return m_bit_range.m_size_in_bits;
     401    }
     402    /* Return the next bit offset after the end of this binding.  */
     403    bit_offset_t get_next_bit_offset () const
     404    {
     405      return m_bit_range.get_next_bit_offset ();
     406    }
     407  
     408    bool overlaps_p (const concrete_binding &other) const;
     409  
     410    static int cmp_ptr_ptr (const void *, const void *);
     411  
     412    void mark_deleted () { m_bit_range.m_start_bit_offset = -1; }
     413    void mark_empty () { m_bit_range.m_start_bit_offset = -2; }
     414    bool is_deleted () const { return m_bit_range.m_start_bit_offset == -1; }
     415    bool is_empty () const { return m_bit_range.m_start_bit_offset == -2; }
     416  
     417  private:
     418    bit_range m_bit_range;
     419  };
     420  
     421  } // namespace ana
     422  
     423  template <>
     424  template <>
     425  inline bool
     426  is_a_helper <const ana::concrete_binding *>::test (const ana::binding_key *key)
     427  {
     428    return key->concrete_p ();
     429  }
     430  
     431  template <> struct default_hash_traits<ana::concrete_binding>
     432  : public member_function_hash_traits<ana::concrete_binding>
     433  {
     434    static const bool empty_zero_p = false;
     435  };
     436  
     437  namespace ana {
     438  
     439  /* Concrete subclass of binding_key, for describing a symbolic set of
     440     bits within the binding_map in terms of a region (e.g. "arr[i]").  */
     441  
     442  class symbolic_binding : public binding_key
     443  {
     444  public:
     445    /* This class is its own key for the purposes of consolidation.  */
     446    typedef symbolic_binding key_t;
     447  
     448    symbolic_binding (const region *region) : m_region (region) {}
     449    bool concrete_p () const final override { return false; }
     450  
     451    hashval_t hash () const
     452    {
     453      return (intptr_t)m_region;
     454    }
     455    bool operator== (const symbolic_binding &other) const
     456    {
     457      return m_region == other.m_region;
     458    }
     459  
     460    void dump_to_pp (pretty_printer *pp, bool simple) const final override;
     461  
     462    const symbolic_binding *dyn_cast_symbolic_binding () const final override
     463    { return this; }
     464  
     465    const region *get_region () const { return m_region; }
     466  
     467    static int cmp_ptr_ptr (const void *, const void *);
     468  
     469    void mark_deleted () { m_region = reinterpret_cast<const region *> (1); }
     470    void mark_empty () { m_region = NULL; }
     471    bool is_deleted () const
     472    { return m_region == reinterpret_cast<const region *> (1); }
     473    bool is_empty () const { return m_region == NULL; }
     474  
     475  private:
     476    const region *m_region;
     477  };
     478  
     479  } // namespace ana
     480  
     481  template <> struct default_hash_traits<ana::symbolic_binding>
     482  : public member_function_hash_traits<ana::symbolic_binding>
     483  {
     484    static const bool empty_zero_p = true;
     485  };
     486  
     487  namespace ana {
     488  
     489  /* A mapping from binding_keys to svalues, for use by binding_cluster
     490     and compound_svalue.  */
     491  
     492  class binding_map
     493  {
     494  public:
     495    typedef hash_map <const binding_key *, const svalue *> map_t;
     496    typedef map_t::iterator iterator_t;
     497  
     498    binding_map () : m_map () {}
     499    binding_map (const binding_map &other);
     500    binding_map& operator=(const binding_map &other);
     501  
     502    bool operator== (const binding_map &other) const;
     503    bool operator!= (const binding_map &other) const
     504    {
     505      return !(*this == other);
     506    }
     507  
     508    hashval_t hash () const;
     509  
     510    const svalue *get (const binding_key *key) const
     511    {
     512      const svalue **slot = const_cast<map_t &> (m_map).get (key);
     513      if (slot)
     514        return *slot;
     515      else
     516        return NULL;
     517    }
     518    bool put (const binding_key *k, const svalue *v)
     519    {
     520      gcc_assert (v);
     521      return m_map.put (k, v);
     522    }
     523  
     524    void remove (const binding_key *k) { m_map.remove (k); }
     525    void empty () { m_map.empty (); }
     526  
     527    iterator_t begin () const { return m_map.begin (); }
     528    iterator_t end () const { return m_map.end (); }
     529    size_t elements () const { return m_map.elements (); }
     530  
     531    void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
     532    void dump (bool simple) const;
     533  
     534    json::object *to_json () const;
     535  
     536    bool apply_ctor_to_region (const region *parent_reg, tree ctor,
     537  			     region_model_manager *mgr);
     538  
     539    static int cmp (const binding_map &map1, const binding_map &map2);
     540  
     541    void remove_overlapping_bindings (store_manager *mgr,
     542  				    const binding_key *drop_key,
     543  				    uncertainty_t *uncertainty,
     544  				    svalue_set *maybe_live_values,
     545  				    bool always_overlap);
     546  
     547  private:
     548    void get_overlapping_bindings (const binding_key *key,
     549  				 auto_vec<const binding_key *> *out);
     550    bool apply_ctor_val_to_range (const region *parent_reg,
     551  				region_model_manager *mgr,
     552  				tree min_index, tree max_index,
     553  				tree val);
     554    bool apply_ctor_pair_to_child_region (const region *parent_reg,
     555  					region_model_manager *mgr,
     556  					tree index, tree val);
     557  
     558    map_t m_map;
     559  };
     560  
     561  /* Concept: BindingVisitor, for use by binding_cluster::for_each_binding
     562     and store::for_each_binding.
     563  
     564     Should implement:
     565       void on_binding (const binding_key *key, const svalue *&sval);
     566  */
     567  
     568  /* All of the bindings within a store for regions that share the same
     569     base region.  */
     570  
     571  class binding_cluster
     572  {
     573  public:
     574    friend class store;
     575  
     576    typedef hash_map <const binding_key *, const svalue *> map_t;
     577    typedef map_t::iterator iterator_t;
     578  
     579    binding_cluster (const region *base_region);
     580    binding_cluster (const binding_cluster &other);
     581    binding_cluster& operator=(const binding_cluster &other);
     582  
     583    bool operator== (const binding_cluster &other) const;
     584    bool operator!= (const binding_cluster &other) const
     585    {
     586      return !(*this == other);
     587    }
     588  
     589    hashval_t hash () const;
     590  
     591    bool symbolic_p () const;
     592  
     593    const region *get_base_region () const { return m_base_region; }
     594  
     595    void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
     596    void dump (bool simple) const;
     597  
     598    void validate () const;
     599  
     600    json::object *to_json () const;
     601  
     602    void bind (store_manager *mgr, const region *, const svalue *);
     603  
     604    void clobber_region (store_manager *mgr, const region *reg);
     605    void purge_region (store_manager *mgr, const region *reg);
     606    void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
     607    void zero_fill_region (store_manager *mgr, const region *reg);
     608    void mark_region_as_unknown (store_manager *mgr,
     609  			       const region *reg_to_bind,
     610  			       const region *reg_for_overlap,
     611  			       uncertainty_t *uncertainty,
     612  			       svalue_set *maybe_live_values);
     613    void purge_state_involving (const svalue *sval,
     614  			      region_model_manager *sval_mgr);
     615  
     616    const svalue *get_binding (store_manager *mgr, const region *reg) const;
     617    const svalue *get_binding_recursive (store_manager *mgr,
     618  					const region *reg) const;
     619    const svalue *get_any_binding (store_manager *mgr,
     620  				  const region *reg) const;
     621    const svalue *maybe_get_compound_binding (store_manager *mgr,
     622  					     const region *reg) const;
     623  
     624    void remove_overlapping_bindings (store_manager *mgr, const region *reg,
     625  				    uncertainty_t *uncertainty,
     626  				    svalue_set *maybe_live_values);
     627  
     628    template <typename T>
     629    void for_each_value (void (*cb) (const svalue *sval, T user_data),
     630  		       T user_data) const
     631    {
     632      for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
     633        cb ((*iter).second, user_data);
     634    }
     635  
     636    static bool can_merge_p (const binding_cluster *cluster_a,
     637  			   const binding_cluster *cluster_b,
     638  			   binding_cluster *out_cluster,
     639  			   store *out_store,
     640  			   store_manager *mgr,
     641  			   model_merger *merger);
     642    void make_unknown_relative_to (const binding_cluster *other_cluster,
     643  				 store *out_store,
     644  				 store_manager *mgr);
     645  
     646    void mark_as_escaped ();
     647    void on_unknown_fncall (const gcall *call, store_manager *mgr,
     648  			  const conjured_purge &p);
     649    void on_asm (const gasm *stmt, store_manager *mgr,
     650  	       const conjured_purge &p);
     651  
     652    bool escaped_p () const;
     653    bool touched_p () const { return m_touched; }
     654  
     655    bool redundant_p () const;
     656    bool empty_p () const { return m_map.elements () == 0; }
     657  
     658    void get_representative_path_vars (const region_model *model,
     659  				     svalue_set *visited,
     660  				     const region *base_reg,
     661  				     const svalue *sval,
     662  				     auto_vec<path_var> *out_pvs) const;
     663  
     664    const svalue *maybe_get_simple_value (store_manager *mgr) const;
     665  
     666    template <typename BindingVisitor>
     667    void for_each_binding (BindingVisitor &v) const
     668    {
     669      for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
     670        {
     671  	const binding_key *key = (*iter).first;
     672  	const svalue *&sval = (*iter).second;
     673  	v.on_binding (key, sval);
     674        }
     675    }
     676  
     677    iterator_t begin () const { return m_map.begin (); }
     678    iterator_t end () const { return m_map.end (); }
     679  
     680    const binding_map &get_map () const { return m_map; }
     681  
     682  private:
     683    const svalue *get_any_value (const binding_key *key) const;
     684    void bind_compound_sval (store_manager *mgr,
     685  			   const region *reg,
     686  			   const compound_svalue *compound_sval);
     687    void bind_key (const binding_key *key, const svalue *sval);
     688  
     689    const region *m_base_region;
     690  
     691    binding_map m_map;
     692  
     693    /* Has a pointer to this cluster "escaped" into a part of the program
     694       we don't know about (via a call to a function with an unknown body,
     695       or by being passed in as a pointer param of a "top-level" function call).
     696       Such regions could be overwritten when other such functions are called,
     697       even if the region is no longer reachable by pointers that we are
     698       tracking. */
     699    bool m_escaped;
     700  
     701    /* Has this cluster been written to via a symbolic binding?
     702       If so, then we don't know anything about unbound subregions,
     703       so we can't use initial_svalue, treat them as uninitialized, or
     704       inherit values from a parent region.  */
     705    bool m_touched;
     706  };
     707  
     708  /* The mapping from regions to svalues.
     709     This is actually expressed by subdividing into clusters, to better
     710     handle aliasing.  */
     711  
     712  class store
     713  {
     714  public:
     715    typedef hash_map <const region *, binding_cluster *> cluster_map_t;
     716  
     717    store ();
     718    store (const store &other);
     719    ~store ();
     720  
     721    store &operator= (const store &other);
     722  
     723    bool operator== (const store &other) const;
     724    bool operator!= (const store &other) const
     725    {
     726      return !(*this == other);
     727    }
     728  
     729    hashval_t hash () const;
     730  
     731    void dump_to_pp (pretty_printer *pp, bool summarize, bool multiline,
     732  		   store_manager *mgr) const;
     733    void dump (bool simple) const;
     734    void summarize_to_pp (pretty_printer *pp, bool simple) const;
     735  
     736    void validate () const;
     737  
     738    json::object *to_json () const;
     739  
     740    const svalue *get_any_binding (store_manager *mgr, const region *reg) const;
     741  
     742    bool called_unknown_fn_p () const { return m_called_unknown_fn; }
     743  
     744    void set_value (store_manager *mgr, const region *lhs_reg,
     745  		  const svalue *rhs_sval,
     746  		  uncertainty_t *uncertainty);
     747    void clobber_region (store_manager *mgr, const region *reg);
     748    void purge_region (store_manager *mgr, const region *reg);
     749    void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
     750    void zero_fill_region (store_manager *mgr, const region *reg);
     751    void mark_region_as_unknown (store_manager *mgr, const region *reg,
     752  			       uncertainty_t *uncertainty,
     753  			       svalue_set *maybe_live_values);
     754    void purge_state_involving (const svalue *sval,
     755  			      region_model_manager *sval_mgr);
     756  
     757    const binding_cluster *get_cluster (const region *base_reg) const;
     758    binding_cluster *get_cluster (const region *base_reg);
     759    binding_cluster *get_or_create_cluster (const region *base_reg);
     760    void purge_cluster (const region *base_reg);
     761  
     762    template <typename T>
     763    void for_each_cluster (void (*cb) (const region *base_reg, T user_data),
     764  			 T user_data) const
     765    {
     766      for (cluster_map_t::iterator iter = m_cluster_map.begin ();
     767  	 iter != m_cluster_map.end (); ++iter)
     768        cb ((*iter).first, user_data);
     769    }
     770  
     771    static bool can_merge_p (const store *store_a, const store *store_b,
     772  			   store *out_store, store_manager *mgr,
     773  			   model_merger *merger);
     774  
     775    void mark_as_escaped (const region *base_reg);
     776    void on_unknown_fncall (const gcall *call, store_manager *mgr,
     777  			  const conjured_purge &p);
     778    bool escaped_p (const region *reg) const;
     779  
     780    void get_representative_path_vars (const region_model *model,
     781  				     svalue_set *visited,
     782  				     const svalue *sval,
     783  				     auto_vec<path_var> *out_pvs) const;
     784  
     785    cluster_map_t::iterator begin () const { return m_cluster_map.begin (); }
     786    cluster_map_t::iterator end () const { return m_cluster_map.end (); }
     787  
     788    tristate eval_alias (const region *base_reg_a,
     789  		       const region *base_reg_b) const;
     790  
     791    template <typename BindingVisitor>
     792    void for_each_binding (BindingVisitor &v)
     793    {
     794      for (cluster_map_t::iterator iter = m_cluster_map.begin ();
     795  	 iter != m_cluster_map.end (); ++iter)
     796        (*iter).second->for_each_binding (v);
     797    }
     798  
     799    void canonicalize (store_manager *mgr);
     800    void loop_replay_fixup (const store *other_store,
     801  			  region_model_manager *mgr);
     802  
     803    void replay_call_summary (call_summary_replay &r,
     804  			    const store &summary);
     805    void replay_call_summary_cluster (call_summary_replay &r,
     806  				    const store &summary,
     807  				    const region *base_reg);
     808    void on_maybe_live_values (const svalue_set &maybe_live_values);
     809  
     810  private:
     811    void remove_overlapping_bindings (store_manager *mgr, const region *reg,
     812  				    uncertainty_t *uncertainty);
     813    tristate eval_alias_1 (const region *base_reg_a,
     814  			 const region *base_reg_b) const;
     815  
     816    cluster_map_t m_cluster_map;
     817  
     818    /* If this is true, then unknown code has been called, and so
     819       any global variable that isn't currently modelled by the store
     820       has unknown state, rather than being in an "initial state".
     821       This is to avoid having to mark (and thus explicitly track)
     822       every global when an unknown function is called; instead, they
     823       can be tracked implicitly.  */
     824    bool m_called_unknown_fn;
     825  };
     826  
     827  /* A class responsible for owning and consolidating binding keys
     828     (both concrete and symbolic).
     829     Key instances are immutable as far as clients are concerned, so they
     830     are provided as "const" ptrs.  */
     831  
     832  class store_manager
     833  {
     834  public:
     835    store_manager (region_model_manager *mgr) : m_mgr (mgr) {}
     836  
     837    logger *get_logger () const;
     838  
     839    /* binding consolidation.  */
     840    const concrete_binding *
     841    get_concrete_binding (bit_offset_t start_bit_offset,
     842  			bit_offset_t size_in_bits);
     843    const concrete_binding *
     844    get_concrete_binding (const bit_range &bits)
     845    {
     846      return get_concrete_binding (bits.get_start_bit_offset (),
     847  				 bits.m_size_in_bits);
     848    }
     849    const symbolic_binding *
     850    get_symbolic_binding (const region *region);
     851  
     852    region_model_manager *get_svalue_manager () const
     853    {
     854      return m_mgr;
     855    }
     856  
     857    void log_stats (logger *logger, bool show_objs) const;
     858  
     859  private:
     860    region_model_manager *m_mgr;
     861    consolidation_map<concrete_binding> m_concrete_binding_key_mgr;
     862    consolidation_map<symbolic_binding> m_symbolic_binding_key_mgr;
     863  };
     864  
     865  } // namespace ana
     866  
     867  #endif /* GCC_ANALYZER_STORE_H */