On Thu, Mar 21, 2019 at 09:28:06AM +0100, Richard Biener wrote:
> > Well, this is surely more costly in that it allocates both the hash_table
> > structure and the memory pointed by it from GC.
> 
> You could use
> 
>   char constexpr_fundef_table[sizeof(hash-table)];
>   bool constexpr_fundef_table_initialized_p = false;
> 
>   if (!constexpr_fundef_table_initialized_p)
>     new (constexpr_fundef_table) hash-table (...);
> 
> and hide this pattern behind a new RAII class?

Do we have a portable C++98 way to make sure the buffer has proper alignment
though?  Plus we'd need to pass around this auto_hash_set instead of
hash_set, so from the usage POV it would be like the following patches,
where the functionality is embedded in the hash_set instantiation itself
via a special new template parameter.
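
For illustration, a minimal C++98-style sketch of such a RAII wrapper
(hypothetical, not part of the patches below; the union member is a crude
stand-in for the alignment control that C++98 lacks, which is exactly the
concern above):

  #include <new>

  // Hypothetical sketch: lazily constructed, properly destroyed storage
  // for T.  The union only *hopes* to provide strict enough alignment;
  // C++11 alignas would make this portable.
  template <typename T>
  struct lazy_storage
  {
    union
    {
      char buf[sizeof (T)];
      long long ll;
      double d;
      void *p;
    } u;
    bool constructed;

    lazy_storage () : constructed (false) {}
    ~lazy_storage () { if (constructed) get ().~T (); }

    T &get ()
    {
      if (!constructed)
        {
          // Placement-new into the raw buffer on first use.
          new (static_cast<void *> (u.buf)) T ();
          constructed = true;
        }
      return *reinterpret_cast<T *> (u.buf);
    }
  };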

> > I guess another option would be to make the decision whether
> > the hash_{table,set,map} is constructed with allocation right away (and no
> > runtime checks for it) or if it is lazy another template argument (bool
> > Lazy = false before the Allocator template argument), if !Lazy, it would
> > work exactly as it is now, if Lazy it would not allocate it in the ctor
> > and where needed would add m_entries checks.
> 
> That's a possibility, but what about the above RAII trick?  Not sure
> if there's a way to avoid the char[] use and instead use "unconstructed"
> properly typed storage.

Attached (so far without a changelog or selftest additions, and only tested
with make -j32 -k check-c++-all RUNTESTFLAGS="dg.exp='pr89767.C *lambda* *desig*'")
are:
1) a hash_{table,set} <whatever, true> implementation for lazy allocation
2) the PR c++/89767 patch with an optimization for zero and one entries
3) an incremental PR c++/71446 fix to simplify that code using this new
   infrastructure

For 2), because of the zero- and one-entry optimization, we still need to
test ids->elements () == 0 and handle that case specially, but a version
that optimized solely the zero-captures case could do without such checks:
just if (ids->add (name)) and no other ids->* calls.  And in 3), while the
addition doesn't use a condition, I use the pset.elements () check as an
optimization to avoid calling the recursive function in the most common case
where there are no designators; the pset.elements () != 0 check is just a
comparison of the difference of two fields against 0.
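
Concretely, the heart of the two patterns below boils down to:

  /* 2) Duplicate-capture detection; with Lazy = true the entries array
     is only allocated once a second capture is actually added.  */
  hash_set<tree, true> ids;
  ...
  if (ids.add (name))	/* True means NAME was already in the set.  */
    pedwarn (input_location, 0,
	     "already captured %qD in lambda expression", id);

  /* 3) elements () is just m_n_elements - m_n_deleted, so the common
     no-designator case costs one compare and never allocates.  */
  hash_set<tree, true> pset;
  ...
  if (pset.elements () && field_in_pset (pset, field))
    continue;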

        Jakub
--- gcc/hash-table.h.jj 2019-03-14 23:46:28.593616750 +0100
+++ gcc/hash-table.h    2019-03-21 09:27:19.722074003 +0100
@@ -167,6 +167,15 @@ along with GCC; see the file COPYING3.
    See hash_table for details.  The interface is very similar to libiberty's
    htab_t.
 
+   If a hash table is used only in some rare cases, it is possible
+   to construct the hash_table lazily before first use.  This is done
+   through:
+
+      hash_table <some_type_hasher, true> some_type_hash_table;
+
+   which will cause whatever methods actually need the allocated entries
+   array to allocate it later.
+
 
    EASY DESCRIPTORS FOR POINTERS
 
@@ -241,7 +250,7 @@ along with GCC; see the file COPYING3.
 #include "hash-map-traits.h"
 
 template<typename, typename, typename> class hash_map;
-template<typename, typename> class hash_set;
+template<typename, bool, typename> class hash_set;
 
 /* The ordinary memory allocator.  */
 /* FIXME (crowl): This allocator may be extracted for wider sharing later.  */
@@ -353,8 +362,8 @@ class mem_usage;
      hash table code.
 
 */
-template <typename Descriptor,
-        template<typename Type> class Allocator = xcallocator>
+template <typename Descriptor, bool Lazy = false,
+         template<typename Type> class Allocator = xcallocator>
 class hash_table
 {
   typedef typename Descriptor::value_type value_type;
@@ -422,7 +431,7 @@ public:
      write the value you want into the returned slot.  When inserting an
      entry, NULL may be returned if memory allocation fails. */
   value_type *find_slot_with_hash (const compare_type &comparable,
-                                   hashval_t hash, enum insert_option insert);
+                                  hashval_t hash, enum insert_option insert);
 
   /* This function deletes an element with the given COMPARABLE value
      from hash table starting with the given HASH.  If there is no
@@ -472,6 +481,8 @@ public:
 
   iterator begin () const
     {
+      if (Lazy && m_entries == NULL)
+       return iterator ();
       iterator iter (m_entries, m_entries + m_size);
       iter.slide ();
       return iter;
@@ -491,9 +502,8 @@ private:
     hashtab_entry_note_pointers (void *, void *, gt_pointer_operator, void *);
   template<typename T, typename U, typename V> friend void
   gt_pch_nx (hash_map<T, U, V> *, gt_pointer_operator, void *);
-  template<typename T, typename U> friend void gt_pch_nx (hash_set<T, U> *,
-                                                         gt_pointer_operator,
-                                                         void *);
+  template<typename T, typename U>
+  friend void gt_pch_nx (hash_set<T, false, U> *, gt_pointer_operator, void *);
   template<typename T> friend void gt_pch_nx (hash_table<T> *,
                                              gt_pointer_operator, void *);
 
@@ -566,11 +576,12 @@ extern mem_alloc_description<mem_usage>&
 /* Support function for statistics.  */
 extern void dump_hash_table_loc_statistics (void);
 
-template<typename Descriptor, template<typename Type> class Allocator>
-hash_table<Descriptor, Allocator>::hash_table (size_t size, bool ggc, bool
-                                              gather_mem_stats,
-                                              mem_alloc_origin origin
-                                              MEM_STAT_DECL) :
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
+hash_table<Descriptor, Lazy, Allocator>::hash_table (size_t size, bool ggc,
+                                                    bool gather_mem_stats,
+                                                    mem_alloc_origin origin
+                                                    MEM_STAT_DECL) :
   m_n_elements (0), m_n_deleted (0), m_searches (0), m_collisions (0),
   m_ggc (ggc), m_gather_mem_stats (gather_mem_stats)
 {
@@ -581,18 +592,23 @@ hash_table<Descriptor, Allocator>::hash_
 
   if (m_gather_mem_stats)
     hash_table_usage ().register_descriptor (this, origin, ggc
-                                         FINAL_PASS_MEM_STAT);
+                                            FINAL_PASS_MEM_STAT);
 
-  m_entries = alloc_entries (size PASS_MEM_STAT);
+  if (Lazy)
+    m_entries = NULL;
+  else
+    m_entries = alloc_entries (size PASS_MEM_STAT);
   m_size = size;
   m_size_prime_index = size_prime_index;
 }
 
-template<typename Descriptor, template<typename Type> class Allocator>
-hash_table<Descriptor, Allocator>::hash_table (const hash_table &h, bool ggc,
-                                              bool gather_mem_stats,
-                                              mem_alloc_origin origin
-                                              MEM_STAT_DECL) :
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
+hash_table<Descriptor, Lazy, Allocator>::hash_table (const hash_table &h,
+                                                    bool ggc,
+                                                    bool gather_mem_stats,
+                                                    mem_alloc_origin origin
+                                                    MEM_STAT_DECL) :
   m_n_elements (h.m_n_elements), m_n_deleted (h.m_n_deleted),
   m_searches (0), m_collisions (0), m_ggc (ggc),
   m_gather_mem_stats (gather_mem_stats)
@@ -603,43 +619,54 @@ hash_table<Descriptor, Allocator>::hash_
     hash_table_usage ().register_descriptor (this, origin, ggc
                                          FINAL_PASS_MEM_STAT);
 
-  value_type *nentries = alloc_entries (size PASS_MEM_STAT);
-  for (size_t i = 0; i < size; ++i)
+  if (Lazy && h.m_entries == NULL)
+    m_entries = NULL;
+  else
     {
-      value_type &entry = h.m_entries[i];
-      if (is_deleted (entry))
-       mark_deleted (nentries[i]);
-      else if (!is_empty (entry))
-       nentries[i] = entry;
+      value_type *nentries = alloc_entries (size PASS_MEM_STAT);
+      for (size_t i = 0; i < size; ++i)
+       {
+         value_type &entry = h.m_entries[i];
+         if (is_deleted (entry))
+           mark_deleted (nentries[i]);
+         else if (!is_empty (entry))
+           nentries[i] = entry;
+       }
+      m_entries = nentries;
     }
-  m_entries = nentries;
   m_size = size;
   m_size_prime_index = h.m_size_prime_index;
 }
 
-template<typename Descriptor, template<typename Type> class Allocator>
-hash_table<Descriptor, Allocator>::~hash_table ()
-{
-  for (size_t i = m_size - 1; i < m_size; i--)
-    if (!is_empty (m_entries[i]) && !is_deleted (m_entries[i]))
-      Descriptor::remove (m_entries[i]);
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
+hash_table<Descriptor, Lazy, Allocator>::~hash_table ()
+{
+  if (!Lazy || m_entries)
+    {
+      for (size_t i = m_size - 1; i < m_size; i--)
+       if (!is_empty (m_entries[i]) && !is_deleted (m_entries[i]))
+         Descriptor::remove (m_entries[i]);
 
-  if (!m_ggc)
-    Allocator <value_type> ::data_free (m_entries);
-  else
-    ggc_free (m_entries);
+      if (!m_ggc)
+       Allocator <value_type> ::data_free (m_entries);
+      else
+       ggc_free (m_entries);
+    }
 
   if (m_gather_mem_stats)
     hash_table_usage ().release_instance_overhead (this,
-                                               sizeof (value_type) * m_size,
-                                               true);
+                                                  sizeof (value_type)
+                                                  * m_size, true);
 }
 
 /* This function returns an array of empty hash table elements.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
-inline typename hash_table<Descriptor, Allocator>::value_type *
-hash_table<Descriptor, Allocator>::alloc_entries (size_t n MEM_STAT_DECL) const
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
+inline typename hash_table<Descriptor, Lazy, Allocator>::value_type *
+hash_table<Descriptor, Lazy,
+          Allocator>::alloc_entries (size_t n MEM_STAT_DECL) const
 {
   value_type *nentries;
 
@@ -665,9 +692,11 @@ hash_table<Descriptor, Allocator>::alloc
    This function also assumes there are no deleted entries in the table.
    HASH is the hash value for the element to be inserted.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
-typename hash_table<Descriptor, Allocator>::value_type *
-hash_table<Descriptor, Allocator>::find_empty_slot_for_expand (hashval_t hash)
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
+typename hash_table<Descriptor, Lazy, Allocator>::value_type *
+hash_table<Descriptor, Lazy,
+          Allocator>::find_empty_slot_for_expand (hashval_t hash)
 {
   hashval_t index = hash_table_mod1 (hash, m_size_prime_index);
   size_t size = m_size;
@@ -694,9 +723,10 @@ hash_table<Descriptor, Allocator>::find_
 
 /* Return true if the current table is excessively big for ELTS elements.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
 inline bool
-hash_table<Descriptor, Allocator>::too_empty_p (unsigned int elts)
+hash_table<Descriptor, Lazy, Allocator>::too_empty_p (unsigned int elts)
 {
   return elts * 8 < m_size && m_size > 32;
 }
@@ -708,9 +738,10 @@ hash_table<Descriptor, Allocator>::too_e
    table entries is changed.  If memory allocation fails, this function
    will abort.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
 void
-hash_table<Descriptor, Allocator>::expand ()
+hash_table<Descriptor, Lazy, Allocator>::expand ()
 {
   value_type *oentries = m_entries;
   unsigned int oindex = m_size_prime_index;
@@ -769,9 +800,10 @@ hash_table<Descriptor, Allocator>::expan
 
 /* Implements empty() in cases where it isn't a no-op.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
 void
-hash_table<Descriptor, Allocator>::empty_slow ()
+hash_table<Descriptor, Lazy, Allocator>::empty_slow ()
 {
   size_t size = m_size;
   size_t nsize = size;
@@ -819,9 +851,10 @@ hash_table<Descriptor, Allocator>::empty
    useful when you've already done the lookup and don't want to do it
    again. */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
 void
-hash_table<Descriptor, Allocator>::clear_slot (value_type *slot)
+hash_table<Descriptor, Lazy, Allocator>::clear_slot (value_type *slot)
 {
   gcc_checking_assert (!(slot < m_entries || slot >= m_entries + size ()
                         || is_empty (*slot) || is_deleted (*slot)));
@@ -836,15 +869,18 @@ hash_table<Descriptor, Allocator>::clear
    COMPARABLE element starting with the given HASH value.  It cannot
    be used to insert or delete an element. */
 
-template<typename Descriptor, template<typename Type> class Allocator>
-typename hash_table<Descriptor, Allocator>::value_type &
-hash_table<Descriptor, Allocator>
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
+typename hash_table<Descriptor, Lazy, Allocator>::value_type &
+hash_table<Descriptor, Lazy, Allocator>
 ::find_with_hash (const compare_type &comparable, hashval_t hash)
 {
   m_searches++;
   size_t size = m_size;
   hashval_t index = hash_table_mod1 (hash, m_size_prime_index);
 
+  if (Lazy && m_entries == NULL)
+    m_entries = alloc_entries (size);
   value_type *entry = &m_entries[index];
   if (is_empty (*entry)
       || (!is_deleted (*entry) && Descriptor::equal (*entry, comparable)))
@@ -873,12 +909,20 @@ hash_table<Descriptor, Allocator>
    write the value you want into the returned slot.  When inserting an
    entry, NULL may be returned if memory allocation fails. */
 
-template<typename Descriptor, template<typename Type> class Allocator>
-typename hash_table<Descriptor, Allocator>::value_type *
-hash_table<Descriptor, Allocator>
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
+typename hash_table<Descriptor, Lazy, Allocator>::value_type *
+hash_table<Descriptor, Lazy, Allocator>
 ::find_slot_with_hash (const compare_type &comparable, hashval_t hash,
                       enum insert_option insert)
 {
+  if (Lazy && m_entries == NULL)
+    {
+      if (insert == INSERT)
+       m_entries = alloc_entries (m_size);
+      else
+       return NULL;
+    }
   if (insert == INSERT && m_size * 3 <= m_n_elements * 4)
     expand ();
 
@@ -934,9 +978,10 @@ hash_table<Descriptor, Allocator>
    from hash table starting with the given HASH.  If there is no
    matching element in the hash table, this function does nothing. */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
 void
-hash_table<Descriptor, Allocator>
+hash_table<Descriptor, Lazy, Allocator>
 ::remove_elt_with_hash (const compare_type &comparable, hashval_t hash)
 {
   value_type *slot = find_slot_with_hash (comparable, hash, NO_INSERT);
@@ -953,15 +998,18 @@ hash_table<Descriptor, Allocator>
    each live entry.  If CALLBACK returns false, the iteration stops.
    ARGUMENT is passed as CALLBACK's second argument. */
 
-template<typename Descriptor,
+template<typename Descriptor, bool Lazy,
          template<typename Type> class Allocator>
 template<typename Argument,
-         int (*Callback)
-     (typename hash_table<Descriptor, Allocator>::value_type *slot,
-      Argument argument)>
+        int (*Callback)
+        (typename hash_table<Descriptor, Lazy, Allocator>::value_type *slot,
+        Argument argument)>
 void
-hash_table<Descriptor, Allocator>::traverse_noresize (Argument argument)
+hash_table<Descriptor, Lazy, Allocator>::traverse_noresize (Argument argument)
 {
+  if (Lazy && m_entries == NULL)
+    return;
+
   value_type *slot = m_entries;
   value_type *limit = slot + size ();
 
@@ -979,16 +1027,16 @@ hash_table<Descriptor, Allocator>::trave
 /* Like traverse_noresize, but does resize the table when it is too empty
    to improve effectivity of subsequent calls.  */
 
-template <typename Descriptor,
+template <typename Descriptor, bool Lazy,
          template <typename Type> class Allocator>
 template <typename Argument,
          int (*Callback)
-     (typename hash_table<Descriptor, Allocator>::value_type *slot,
-      Argument argument)>
+         (typename hash_table<Descriptor, Lazy, Allocator>::value_type *slot,
+         Argument argument)>
 void
-hash_table<Descriptor, Allocator>::traverse (Argument argument)
+hash_table<Descriptor, Lazy, Allocator>::traverse (Argument argument)
 {
-  if (too_empty_p (elements ()))
+  if (too_empty_p (elements ()) && (!Lazy || m_entries))
     expand ();
 
   traverse_noresize <Argument, Callback> (argument);
@@ -996,9 +1044,10 @@ hash_table<Descriptor, Allocator>::trave
 
 /* Slide down the iterator slots until an active entry is found.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
 void
-hash_table<Descriptor, Allocator>::iterator::slide ()
+hash_table<Descriptor, Lazy, Allocator>::iterator::slide ()
 {
   for ( ; m_slot < m_limit; ++m_slot )
     {
@@ -1012,9 +1061,10 @@ hash_table<Descriptor, Allocator>::itera
 
 /* Bump the iterator.  */
 
-template<typename Descriptor, template<typename Type> class Allocator>
-inline typename hash_table<Descriptor, Allocator>::iterator &
-hash_table<Descriptor, Allocator>::iterator::operator ++ ()
+template<typename Descriptor, bool Lazy,
+        template<typename Type> class Allocator>
+inline typename hash_table<Descriptor, Lazy, Allocator>::iterator &
+hash_table<Descriptor, Lazy, Allocator>::iterator::operator ++ ()
 {
   ++m_slot;
   slide ();
--- gcc/hash-set.h.jj   2019-01-01 12:37:15.826996789 +0100
+++ gcc/hash-set.h      2019-03-20 23:54:35.115023084 +0100
@@ -21,7 +21,8 @@ along with GCC; see the file COPYING3.
 #ifndef hash_set_h
 #define hash_set_h
 
-template<typename KeyId, typename Traits = default_hash_traits<KeyId> >
+template<typename KeyId, bool Lazy = false,
+        typename Traits = default_hash_traits<KeyId> >
 class hash_set
 {
 public:
@@ -32,12 +33,12 @@ public:
   /* Create a hash_set in gc memory with space for at least n elements.  */
 
   static hash_set *
-    create_ggc (size_t n)
-      {
-       hash_set *set = ggc_alloc<hash_set> ();
-       new (set) hash_set (n, true);
-       return set;
-      }
+  create_ggc (size_t n)
+    {
+      hash_set *set = ggc_alloc<hash_set> ();
+      new (set) hash_set (n, true);
+      return set;
+    }
 
   /* If key k isn't already in the map add it to the map, and
      return false.  Otherwise return true.  */
@@ -56,6 +57,9 @@ public:
 
   bool contains (const Key &k)
     {
+      if (Lazy)
+       return (m_table.find_slot_with_hash (k, Traits::hash (k), NO_INSERT)
+               != NULL);
       Key &e = m_table.find_with_hash (k, Traits::hash (k));
       return !Traits::is_empty (e);
     }
@@ -71,7 +75,7 @@ public:
   template<typename Arg, bool (*f)(const typename Traits::value_type &, Arg)>
   void traverse (Arg a) const
     {
-      for (typename hash_table<Traits>::iterator iter = m_table.begin ();
+      for (typename hash_table<Traits, Lazy>::iterator iter = m_table.begin ();
           iter != m_table.end (); ++iter)
        f (*iter, a);
     }
@@ -87,7 +91,8 @@ public:
   class iterator
   {
   public:
-    explicit iterator (const typename hash_table<Traits>::iterator &iter) :
+    explicit iterator (const typename hash_table<Traits,
+                                                Lazy>::iterator &iter) :
       m_iter (iter) {}
 
     iterator &operator++ ()
@@ -109,7 +114,7 @@ public:
       }
 
   private:
-    typename hash_table<Traits>::iterator m_iter;
+    typename hash_table<Traits, Lazy>::iterator m_iter;
   };
 
   /* Standard iterator retrieval methods.  */
@@ -120,11 +125,14 @@ public:
 
 private:
 
-  template<typename T, typename U> friend void gt_ggc_mx (hash_set<T, U> *);
-  template<typename T, typename U> friend void gt_pch_nx (hash_set<T, U> *);
-  template<typename T, typename U> friend void gt_pch_nx (hash_set<T, U> *,
-                                                          gt_pointer_operator,
-                                                          void *);
+  template<typename T, typename U>
+  friend void gt_ggc_mx (hash_set<T, false, U> *);
+  template<typename T, typename U>
+  friend void gt_pch_nx (hash_set<T, false, U> *);
+  template<typename T, typename U>
+  friend void gt_pch_nx (hash_set<T, false, U> *, gt_pointer_operator, void *);
 
-  hash_table<Traits> m_table;
+  hash_table<Traits, Lazy> m_table;
 };
 
 /* Generic hash_set<TYPE> debug helper.
@@ -169,21 +177,21 @@ debug_helper (hash_set<T> &ref)
 
 template<typename K, typename H>
 static inline void
-gt_ggc_mx (hash_set<K, H> *h)
+gt_ggc_mx (hash_set<K, false, H> *h)
 {
   gt_ggc_mx (&h->m_table);
 }
 
 template<typename K, typename H>
 static inline void
-gt_pch_nx (hash_set<K, H> *h)
+gt_pch_nx (hash_set<K, false, H> *h)
 {
   gt_pch_nx (&h->m_table);
 }
 
 template<typename K, typename H>
 static inline void
-gt_pch_nx (hash_set<K, H> *h, gt_pointer_operator op, void *cookie)
+gt_pch_nx (hash_set<K, false, H> *h, gt_pointer_operator op, void *cookie)
 {
   op (&h->m_table.m_entries, cookie);
 }
--- gcc/attribs.c.jj    2019-03-09 13:07:38.129663934 +0100
+++ gcc/attribs.c       2019-03-21 09:28:03.163372732 +0100
@@ -2075,7 +2075,7 @@ test_attribute_exclusions ()
   const size_t ntables = ARRAY_SIZE (attribute_tables);
 
   /* Set of pairs of mutually exclusive attributes.  */
-  typedef hash_set<excl_pair, excl_hash_traits> exclusion_set;
+  typedef hash_set<excl_pair, false, excl_hash_traits> exclusion_set;
   exclusion_set excl_set;
 
   for (size_t ti0 = 0; ti0 != ntables; ++ti0)
--- gcc/c-family/c-common.c.jj  2019-03-20 12:24:57.320308115 +0100
+++ gcc/c-family/c-common.c     2019-03-21 09:13:48.031176668 +0100
@@ -8731,7 +8731,7 @@ try_to_locate_new_include_insertion_poin
    no guarantee that two different diagnostics that are recommending
    adding e.g. "<stdio.h>" are using the same buffer.  */
 
-typedef hash_set <const char *, nofree_string_hash> per_file_includes_t;
+typedef hash_set <const char *, false, nofree_string_hash> per_file_includes_t;
 
 /* The map itself.  We don't need string comparison for the filename keys,
    as they come from libcpp.  */
--- gcc/jit/jit-recording.c.jj  2019-02-05 23:27:29.010465683 +0100
+++ gcc/jit/jit-recording.c     2019-03-21 09:14:32.782454553 +0100
@@ -245,7 +245,7 @@ class reproducer : public dump
   {
     static void remove (const char *) {}
   };
-  hash_set<const char *, hash_traits> m_set_identifiers;
+  hash_set<const char *, false, hash_traits> m_set_identifiers;
   allocator m_allocator;
 };
 
2019-03-21  Jakub Jelinek  <ja...@redhat.com>

        PR c++/89767
        * cp-tree.h (add_capture): Add ids argument.
        * parser.c (cp_parser_lambda_introducer): Add ids variable and pass
        its address to add_capture calls.
        * lambda.c (add_capture): Add ids argument, don't use
        IDENTIFIER_MARKED, instead use ids hash_set for that.
        (register_capture_members): Don't clear IDENTIFIER_MARKED here.
        (add_default_capture): Adjust add_capture caller.

        * g++.dg/cpp1y/lambda-init18.C: New test.
        * g++.dg/cpp1y/lambda-init19.C: New test.
        * g++.dg/cpp1y/pr89767.C: New test.

--- gcc/cp/cp-tree.h.jj 2019-03-20 17:05:02.133174253 +0100
+++ gcc/cp/cp-tree.h    2019-03-21 09:20:21.664822615 +0100
@@ -7150,7 +7150,8 @@ extern tree lambda_return_type                    (tree);
 extern tree lambda_proxy_type                  (tree);
 extern tree lambda_function                    (tree);
 extern void apply_deduced_return_type           (tree, tree);
-extern tree add_capture                         (tree, tree, tree, bool, bool);
+extern tree add_capture                         (tree, tree, tree, bool, bool,
+                                                hash_set<tree, true> *);
 extern tree add_default_capture                 (tree, tree, tree);
 extern void insert_capture_proxy               (tree);
 extern void insert_pending_capture_proxies     (void);
--- gcc/cp/parser.c.jj  2019-03-20 17:05:02.072175252 +0100
+++ gcc/cp/parser.c     2019-03-21 09:22:46.710481168 +0100
@@ -10547,6 +10547,7 @@ cp_parser_lambda_introducer (cp_parser*
        error ("non-local lambda expression cannot have a capture-default");
     }
 
+  hash_set<tree, true> ids;
   while (cp_lexer_next_token_is_not (parser->lexer, CPP_CLOSE_SQUARE))
     {
       cp_token* capture_token;
@@ -10586,7 +10587,7 @@ cp_parser_lambda_introducer (cp_parser*
                       /*id=*/this_identifier,
                       /*initializer=*/finish_this_expr (),
                       /*by_reference_p=*/true,
-                      explicit_init_p);
+                      explicit_init_p, &ids);
          continue;
        }
 
@@ -10604,7 +10605,7 @@ cp_parser_lambda_introducer (cp_parser*
                       /*id=*/this_identifier,
                       /*initializer=*/finish_this_expr (),
                       /*by_reference_p=*/false,
-                      explicit_init_p);
+                      explicit_init_p, &ids);
          continue;
        }
 
@@ -10757,7 +10758,7 @@ cp_parser_lambda_introducer (cp_parser*
                   capture_id,
                   capture_init_expr,
                   /*by_reference_p=*/capture_kind == BY_REFERENCE,
-                  explicit_init_p);
+                  explicit_init_p, &ids);
 
       /* If there is any qualification still in effect, clear it
         now; we will be starting fresh with the next capture.  */
--- gcc/cp/lambda.c.jj  2019-03-20 17:05:02.179173500 +0100
+++ gcc/cp/lambda.c     2019-03-21 09:22:26.137813272 +0100
@@ -500,11 +500,12 @@ vla_capture_type (tree array_type)
 /* From an ID and INITIALIZER, create a capture (by reference if
    BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
    and return it.  If ID is `this', BY_REFERENCE_P says whether
-   `*this' is captured by reference.  */
+   `*this' is captured by reference.  IDS is used during introducer
+   parsing to detect duplicate captures if there are many captures.  */
 
 tree
 add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
-            bool explicit_init_p)
+            bool explicit_init_p, hash_set<tree, true> *ids)
 {
   char *buf;
   tree type, member, name;
@@ -605,13 +606,35 @@ add_capture (tree lambda, tree id, tree
      for duplicates.  */
   if (!LAMBDA_EXPR_CLOSURE (lambda))
     {
-      if (IDENTIFIER_MARKED (name))
+      gcc_assert (ids);
+      bool found;
+      if (ids->elements () == 0)
+       {
+         found = false;
+         /* Optimize for the zero or one explicit captures cases
+            and only create the hash_set after adding second capture.  */
+         if (LAMBDA_EXPR_CAPTURE_LIST (lambda))
+           {
+             tree field = TREE_PURPOSE (LAMBDA_EXPR_CAPTURE_LIST (lambda));
+             if (PACK_EXPANSION_P (field))
+               field = PACK_EXPANSION_PATTERN (field);
+             if (name == DECL_NAME (field))
+               found = true;
+             else
+               {
+                 ids->add (DECL_NAME (field));
+                 ids->add (name);
+               }
+           }
+       }
+      else
+       found = ids->add (name);
+      if (found)
        {
          pedwarn (input_location, 0,
                   "already captured %qD in lambda expression", id);
          return NULL_TREE;
        }
-      IDENTIFIER_MARKED (name) = true;
     }
 
   if (variadic)
@@ -674,8 +697,6 @@ register_capture_members (tree captures)
   if (PACK_EXPANSION_P (field))
     field = PACK_EXPANSION_PATTERN (field);
 
-  /* We set this in add_capture to avoid duplicates.  */
-  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
   finish_member_declaration (field);
 }
 
@@ -706,7 +727,7 @@ add_default_capture (tree lambda_stack,
                            (this_capture_p
                             || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                                 == CPLD_REFERENCE)),
-                           /*explicit_init_p=*/false);
+                           /*explicit_init_p=*/false, NULL);
       initializer = convert_from_reference (var);
 
       /* Warn about deprecated implicit capture of this via [=].  */
--- gcc/testsuite/g++.dg/cpp1y/lambda-init18.C.jj       2019-03-20 17:35:25.121556064 +0100
+++ gcc/testsuite/g++.dg/cpp1y/lambda-init18.C  2019-03-20 17:35:25.121556064 +0100
@@ -0,0 +1,12 @@
+// PR c++/89767
+// { dg-do compile { target c++14 } }
+
+void bar (int);
+
+void
+foo ()
+{
+  int x = 0;
+  auto z = [x, y = [x] { bar (x); }] { y (); bar (x); };
+  z ();
+}
--- gcc/testsuite/g++.dg/cpp1y/lambda-init19.C.jj       2019-03-20 17:35:25.121556064 +0100
+++ gcc/testsuite/g++.dg/cpp1y/lambda-init19.C  2019-03-20 17:35:25.121556064 +0100
@@ -0,0 +1,15 @@
+// PR c++/89767
+// { dg-do compile { target c++14 } }
+
+void bar (int);
+
+void
+foo ()
+{
+  int x = 0;
+  int a = 0, b = 0, c = 0, d = 0, e = 0, f = 0, g = 0, h = 0;
+  auto z = [x, y = [x] { bar (x); }, x] { y (); bar (x); };    // { dg-error "already captured 'x' in lambda expression" }
+  auto w = [x, a, b, c, d, y = [x] { bar (x); }, e, f, g, h, x] { y (); bar (x + a + b + c + d + e + f + g + h); };    // { dg-error "already captured 'x' in lambda expression" }
+  z ();
+  w ();
+}
--- gcc/testsuite/g++.dg/cpp1y/pr89767.C.jj     2019-03-20 17:35:25.121556064 +0100
+++ gcc/testsuite/g++.dg/cpp1y/pr89767.C        2019-03-20 17:35:25.121556064 +0100
@@ -0,0 +1,32 @@
+// PR c++/89767
+// { dg-do compile { target c++14 } }
+// { dg-options "-O2 -Wall" }
+
+template <typename d> struct e { using g = d; };
+template <typename d, template <typename> class> using h = e<d>;
+template <typename d, template <typename> class i>
+using j = typename h<d, i>::g;
+template <typename c> int k(c);
+template <typename...> class au;
+struct l { template <typename c> using m = typename c::f; };
+struct s : l { using af = j<au<int, int> *, m>; };
+template <unsigned long, typename> struct o;
+template <long p, typename c> using q = typename o<p, c>::g;
+template <typename> struct r;
+template <typename c> struct r<c *> { typedef c aj; };
+template <typename ak, typename> struct al { typename r<ak>::aj operator*(); void operator++(); };
+template <typename am, typename an, typename ao>
+bool operator!=(al<am, ao>, al<an, ao>);
+template <unsigned long, typename...> struct ap;
+template <unsigned long aq, typename ar, typename... as>
+struct ap<aq, ar, as...> : ap<1, as...> {};
+template <unsigned long aq, typename ar> struct ap<aq, ar> {};
+template <typename... at> class au : public ap<0, at...> {};
+template <unsigned long p, typename ar, typename... as>
+struct o<p, au<ar, as...>> : o<p - 1, au<as...>> {};
+template <typename ar, typename... as> struct o<0, au<ar, as...>> { typedef ar g; };
+template <long p, typename ar> constexpr ar av(ap<p, ar> __t) { return ar (); }
+template <int p, typename... at> constexpr q<p, au<at...>> aw(au<at...> __t) { av<p>(__t); return q<p, au<at...>> (); }
+struct bg { typedef s::af af; };
+struct F { typedef al<bg::af, int> bk; bk begin(); bk end(); };
+void bo() { int t = 0; F cv; for (auto bp : cv) [t, n = k(aw<1>(bp))] {}; }
2019-03-21  Jakub Jelinek  <ja...@redhat.com>

        PR c++/71446
        * call.c (field_in_pset): Change pset from hash_set<tree> * to
        hash_set<tree, true> &, adjust uses accordingly.
        (build_aggr_conv): Change pset from hash_set<tree> *
        to hash_set<tree, true>.  Replace goto fail; with return NULL;,
        adjust pset uses.

--- gcc/cp/call.c.jj    2019-03-14 09:11:09.321041589 +0100
+++ gcc/cp/call.c       2019-03-21 09:30:32.174967271 +0100
@@ -907,9 +907,9 @@ can_convert_array (tree atype, tree ctor
    is in PSET.  */
 
 static bool
-field_in_pset (hash_set<tree> *pset, tree field)
+field_in_pset (hash_set<tree, true> &pset, tree field)
 {
-  if (pset->contains (field))
+  if (pset.contains (field))
     return true;
   if (ANON_AGGR_TYPE_P (TREE_TYPE (field)))
     for (field = TYPE_FIELDS (TREE_TYPE (field));
@@ -934,7 +934,7 @@ build_aggr_conv (tree type, tree ctor, i
   conversion *c;
   tree field = next_initializable_field (TYPE_FIELDS (type));
   tree empty_ctor = NULL_TREE;
-  hash_set<tree> *pset = NULL;
+  hash_set<tree, true> pset;
 
   /* We already called reshape_init in implicit_conversion.  */
 
@@ -964,7 +964,7 @@ build_aggr_conv (tree type, tree ctor, i
                                      complain);
 
              if (!ok)
-               goto fail;
+               return NULL;
              /* For unions, there should be just one initializer.  */
              if (TREE_CODE (type) == UNION_TYPE)
                {
@@ -972,12 +972,10 @@ build_aggr_conv (tree type, tree ctor, i
                  i = 1;
                  break;
                }
-             if (pset == NULL)
-               pset = new hash_set<tree>;
-             pset->add (idx);
+             pset.add (idx);
            }
          else
-           goto fail;
+           return NULL;
        }
     }
 
@@ -987,7 +985,7 @@ build_aggr_conv (tree type, tree ctor, i
       tree val;
       bool ok;
 
-      if (pset && field_in_pset (pset, field))
+      if (pset.elements () && field_in_pset (pset, field))
        continue;
       if (i < CONSTRUCTOR_NELTS (ctor))
        {
@@ -998,7 +996,7 @@ build_aggr_conv (tree type, tree ctor, i
        val = get_nsdmi (field, /*ctor*/false, complain);
       else if (TYPE_REF_P (ftype))
        /* Value-initialization of reference is ill-formed.  */
-       goto fail;
+       return NULL;
       else
        {
          if (empty_ctor == NULL_TREE)
@@ -1014,22 +1012,15 @@ build_aggr_conv (tree type, tree ctor, i
                              complain);
 
       if (!ok)
-       goto fail;
+       return NULL;
 
       if (TREE_CODE (type) == UNION_TYPE)
        break;
     }
 
   if (i < CONSTRUCTOR_NELTS (ctor))
-    {
-    fail:
-      if (pset)
-       delete pset;
-      return NULL;
-    }
+    return NULL;
 
-  if (pset)
-    delete pset;
   c = alloc_conversion (ck_aggr);
   c->type = type;
   c->rank = cr_exact;
