Re: [21/32] miscellaneous

2020-11-06 Thread Nathan Sidwell

On 11/5/20 8:30 AM, Richard Biener wrote:

> On Tue, Nov 3, 2020 at 10:16 PM Nathan Sidwell  wrote:
>>
>> These are changes to gcc/tree.h adding some raw accessors to nodes,
>> which seemed preferable to direct field access.  I also needed access to
>> the integral constant cache.
>
> can you please document the adjusted interface to cache_integer_cst in
> its (currently non-existent) function-level comment?  It looks like
> 'replace' == true turns it into a get_or_insert, whereas it is currently
> a put with an assertion that the value wasn't already in the cache.


Sure.  It's a little weird in that the current behaviour is to allow 
duplicates in the hash table, but not in the type's small-value vector.
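
To make that split concrete, here is a simplified standalone model (an
illustration only, not GCC code -- the type and all names below are
invented): small values live in a per-type vector where a pre-existing
entry used to be an assertion failure, while large values live in a
shared hash table that already tolerated an equal entry.

#include <cassert>
#include <unordered_map>
#include <vector>

struct node { long value; };				// stand-in for an INTEGER_CST

static std::vector<node *> small_cache (64, nullptr);	// per-type 'smaller values' vector
static std::unordered_map<long, node *> large_cache;	// shared cache of larger values

// Hypothetical stand-in for the new cache_integer_cst: insert N, or hand
// back the node already cached for the same value.
node *
cache_node (node *n, bool may_duplicate)
{
  if (n->value >= 0 && n->value < (long) small_cache.size ())
    {
      node *&slot = small_cache[n->value];
      if (slot)
	{
	  // Old behaviour: an existing entry here was a hard assertion.
	  assert (may_duplicate);
	  return slot;		// get_or_insert: return the cached node
	}
      slot = n;
      return n;
    }

  // The large-value cache already allowed an equal entry to exist;
  // return whichever node ends up cached.
  auto ins = large_cache.emplace (n->value, n);
  return ins.first->second;
}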


I renamed the new parameter and documented what happens.  I'll apply
this as a distinct patch during the merge (with changelog).  For now it
lives on the modules branch.


nathan

--
Nathan Sidwell
diff --git c/gcc/tree.c w/gcc/tree.c
index 9260772b846..9e10df0d7d0 100644
--- c/gcc/tree.c
+++ w/gcc/tree.c
@@ -1727,8 +1727,15 @@ wide_int_to_tree (tree type, const poly_wide_int_ref &value)
   return build_poly_int_cst (type, value);
 }
 
-void
-cache_integer_cst (tree t)
+/* Insert INTEGER_CST T into a cache of integer constants.  And return
+   the cached constant (which may or may not be T).  If MAY_DUPLICATE
+   is false, and T falls into the type's 'smaller values' range, there
+   cannot be an existing entry.  Otherwise, if MAY_DUPLICATE is true,
+   or the value is large, should an existing entry exist, it is
+   returned (rather than inserting T).  */
+
+tree
+cache_integer_cst (tree t, bool may_duplicate ATTRIBUTE_UNUSED)
 {
   tree type = TREE_TYPE (t);
   int ix = -1;
@@ -1742,7 +1749,7 @@ cache_integer_cst (tree t)
   switch (TREE_CODE (type))
 {
 case NULLPTR_TYPE:
-  gcc_assert (integer_zerop (t));
+  gcc_checking_assert (integer_zerop (t));
   /* Fallthru.  */
 
 case POINTER_TYPE:
@@ -1822,21 +1829,32 @@ cache_integer_cst (tree t)
 	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
 	}
 
-  gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
-  TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
+  if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
+	{
+	  gcc_checking_assert (may_duplicate);
+	  t = r;
+	}
+  else
+	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
 }
   else
 {
   /* Use the cache of larger shared ints.  */
   tree *slot = int_cst_hash_table->find_slot (t, INSERT);
-  /* If there is already an entry for the number verify it's the
- same.  */
-  if (*slot)
-	gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
+  if (tree r = *slot)
+	{
+	  /* If there is already an entry for the number verify it's the
+	 same value.  */
+	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
+	  /* And return the cached value.  */
+	  t = r;
+	}
   else
 	/* Otherwise insert this one into the hash table.  */
 	*slot = t;
 }
+
+  return t;
 }
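
For reference, a hypothetical call site (the exact caller on the modules
branch is an assumption here) would now keep whatever node the cache hands
back rather than assuming T itself was inserted:

  /* Hypothetical caller that built an INTEGER_CST directly, bypassing
     wide_int_to_tree -- e.g. when streaming a node back in.  Keep whatever
     the cache returns, which may be a previously cached equal constant
     rather than the freshly built T.  */
  if (TREE_CODE (t) == INTEGER_CST && !TREE_OVERFLOW (t))
    t = cache_integer_cst (t, /*may_duplicate=*/true);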
 
 


Re: [21/32] miscellaneous

2020-11-05 Thread Richard Biener via Gcc-patches
On Tue, Nov 3, 2020 at 10:16 PM Nathan Sidwell  wrote:
>
> These are changes to gcc/tree.h adding some raw accessors to nodes,
> which seemed preferable to direct field access.  I also needed access to
> the integral constant cache.

can you please document the adjusted interface to cache_integer_cst in
its (currently non-existent) function-level comment?  It looks like
'replace' == true turns it into a get_or_insert, whereas it is currently
a put with an assertion that the value wasn't already in the cache.

Otherwise OK.

Thanks,
Richard.

>
> --
> Nathan Sidwell
>


[21/32] miscellaneous

2020-11-03 Thread Nathan Sidwell
These are changes to gcc/tree.h adding some raw accessors to nodes, 
which seemed preferable to direct field access.  I also needed access to 
the integral constant cache.



--
Nathan Sidwell

diff --git c/gcc/tree.h w/gcc/tree.h
index 7f0aa5b8d1d..13062f52919 100644
--- c/gcc/tree.h
+++ w/gcc/tree.h
@@ -2521,25 +2521,28 @@ extern tree vector_element_bits_tree (const_tree);
 #define DECL_SIZE(NODE) (DECL_COMMON_CHECK (NODE)->decl_common.size)
 /* Likewise for the size in bytes.  */
 #define DECL_SIZE_UNIT(NODE) (DECL_COMMON_CHECK (NODE)->decl_common.size_unit)
+#define DECL_ALIGN_RAW(NODE) (DECL_COMMON_CHECK (NODE)->decl_common.align)
 /* Returns the alignment required for the datum, in bits.  It must
be a power of two, but an "alignment" of zero is supported
(e.g. as "uninitialized" sentinel).  */
-#define DECL_ALIGN(NODE) \
-(DECL_COMMON_CHECK (NODE)->decl_common.align \
- ? ((unsigned)1) << ((NODE)->decl_common.align - 1) : 0)
+#define DECL_ALIGN(NODE)	\
+  (DECL_ALIGN_RAW (NODE)	\
+   ? ((unsigned)1) << (DECL_ALIGN_RAW (NODE) - 1) : 0)
 /* Specify that DECL_ALIGN(NODE) is X.  */
 #define SET_DECL_ALIGN(NODE, X) \
-(DECL_COMMON_CHECK (NODE)->decl_common.align = ffs_hwi (X))
+  (DECL_ALIGN_RAW (NODE) = ffs_hwi (X))
 
 /* The minimum alignment necessary for the datum, in bits, without
warning.  */
-#define DECL_WARN_IF_NOT_ALIGN(NODE) \
-(DECL_COMMON_CHECK (NODE)->decl_common.warn_if_not_align \
- ? ((unsigned)1) << ((NODE)->decl_common.warn_if_not_align - 1) : 0)
+#define DECL_WARN_IF_NOT_ALIGN_RAW(NODE)			\
+  (DECL_COMMON_CHECK (NODE)->decl_common.warn_if_not_align)
+#define DECL_WARN_IF_NOT_ALIGN(NODE)	\
+  (DECL_WARN_IF_NOT_ALIGN_RAW (NODE)	\
+   ? ((unsigned)1) << (DECL_WARN_IF_NOT_ALIGN_RAW (NODE) - 1) : 0)
 
 /* Specify that DECL_WARN_IF_NOT_ALIGN(NODE) is X.  */
-#define SET_DECL_WARN_IF_NOT_ALIGN(NODE, X) \
-(DECL_COMMON_CHECK (NODE)->decl_common.warn_if_not_align = ffs_hwi (X))
+#define SET_DECL_WARN_IF_NOT_ALIGN(NODE, X)		\
+  (DECL_WARN_IF_NOT_ALIGN_RAW (NODE) = ffs_hwi (X))
 
 /* The alignment of NODE, in bytes.  */
 #define DECL_ALIGN_UNIT(NODE) (DECL_ALIGN (NODE) / BITS_PER_UNIT)
@@ -5118,7 +5121,7 @@ extern const_tree strip_invariant_refs (const_tree);
 extern tree lhd_gcc_personality (void);
 extern void assign_assembler_name_if_needed (tree);
 extern bool warn_deprecated_use (tree, tree);
-extern void cache_integer_cst (tree);
+extern tree cache_integer_cst (tree, bool small = false);
 extern const char *combined_fn_name (combined_fn);
 
 /* Compare and hash for any structure which begins with a canonical
diff --git c/gcc/tree.c w/gcc/tree.c
index 9260772b846..2656a804ea2 100644
--- c/gcc/tree.c
+++ w/gcc/tree.c
@@ -1727,8 +1727,8 @@ wide_int_to_tree (tree type, const poly_wide_int_ref &value)
   return build_poly_int_cst (type, value);
 }
 
-void
-cache_integer_cst (tree t)
+tree
+cache_integer_cst (tree t, bool replace)
 {
   tree type = TREE_TYPE (t);
   int ix = -1;
@@ -1822,8 +1822,13 @@ cache_integer_cst (tree t)
 	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
 	}
 
-  gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
-  TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
+  if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
+	{
+	  gcc_assert (replace);
+	  t = r;
+	}
+  else
+	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
 }
   else
 {
@@ -1831,12 +1836,18 @@ cache_integer_cst (tree t)
   tree *slot = int_cst_hash_table->find_slot (t, INSERT);
   /* If there is already an entry for the number verify it's the
  same.  */
-  if (*slot)
-	gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
+  if (tree r = *slot)
+	{
+	  gcc_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
+	  if (replace)
+	t = r;
+	}
   else
 	/* Otherwise insert this one into the hash table.  */
 	*slot = t;
 }
+
+  return t;
 }
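
As an aside on the tree.h hunk above: DECL_ALIGN_RAW and
DECL_WARN_IF_NOT_ALIGN_RAW expose the encoding of the underlying fields.
The stored value is ffs_hwi of the alignment -- 1 + log2 for a power of
two -- with 0 serving as the "uninitialized" sentinel, and DECL_ALIGN
decodes it as 1 << (raw - 1).  A minimal standalone sketch of that round
trip (illustration only, not GCC code; __builtin_ffs stands in for
ffs_hwi):

#include <cassert>

static unsigned raw_align;		/* stand-in for decl_common.align */

static void
set_align (unsigned x)			/* like SET_DECL_ALIGN: store ffs of X */
{
  raw_align = __builtin_ffs (x);
}

static unsigned
get_align (void)			/* like DECL_ALIGN: 1 << (raw - 1), or 0 if unset */
{
  return raw_align ? 1u << (raw_align - 1) : 0;
}

int
main (void)
{
  assert (get_align () == 0);		/* zero raw value means "uninitialized" */
  set_align (16);
  assert (raw_align == 5);		/* ffs (16) == 5, i.e. log2 (16) + 1 */
  assert (get_align () == 16);		/* decoding round-trips */
  return 0;
}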