This fixes a regression on the GCC 9 branch that was already fixed on
trunk as part of other modifications.  The patch backports some
refactoring and, with it, the relevant change:

-  if (*disambiguate_only)
+  /* If we are looking for redundant stores do not create new hashtable
+     entries from aliasing defs with made up alias-sets.  */
+  if (*disambiguate_only || !data->tbaa_p)
     return (void *)-1;

Bootstrapped on x86_64-unknown-linux-gnu, testing in progress.

The new testcase also goes to trunk.
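As an aside, here is a minimal sketch of the observable difference the
torture test checks for.  This is my illustration, not part of the patch,
and it assumes the usual _Bool representation where a store of 1 writes
the byte 0x01: if the store to s->x were wrongly treated as redundant
with the preceding memset, a byte-level read would still see 0x11.

/* Illustration only (not from the patch): the byte memset writes to s.x
   differs from the byte the _Bool store of 1 writes, so the store must
   not be removed as redundant.  */
#include <string.h>
#include <stdio.h>

struct S { _Bool x; };

int
main (void)
{
  struct S s;
  memset (&s, 0x11, sizeof s);   /* raw byte backing s.x is now 0x11 */
  s.x = 1;                       /* a correct compile stores the byte 0x01 */
  unsigned char c;
  memcpy (&c, &s.x, 1);
  printf ("%d\n", (int) c);      /* must print 1; a miscompile prints 17 */
  return 0;
}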

Richard.

2019-08-15  Richard Biener  <rguent...@suse.de>

        PR tree-optimization/91445
        * gcc.dg/torture/pr91445.c: New testcase.

        Backport from mainline
        2019-07-05  Richard Biener  <rguent...@suse.de>

        PR tree-optimization/91091
        * tree-ssa-alias.h (get_continuation_for_phi): Add tbaa_p parameter.
        (walk_non_aliased_vuses): Likewise.
        * tree-ssa-alias.c (maybe_skip_until): Pass down tbaa_p.
        (get_continuation_for_phi): New tbaa_p parameter and pass
        it down.
        (walk_non_aliased_vuses): Likewise.
        * tree-ssa-pre.c (translate_vuse_through_block): Likewise.
        * tree-ssa-scopedtables.c (avail_exprs_stack::lookup_avail_expr):
        Likewise.
        * tree-ssa-sccvn.c (struct vn_walk_cb_data): Add tbaa_p flag.
        (vn_reference_lookup_3): Handle and pass down tbaa_p flag.
        (vn_reference_lookup_pieces): Adjust.
        (vn_reference_lookup): Remove alias-set altering, instead pass
        down false as tbaa_p.

        * gcc.dg/tree-ssa/pr91091-2.c: New testcase.

        2019-07-04  Richard Biener  <rguent...@suse.de>

        * tree-ssa-sccvn.h (vn_reference_lookup): Add last_vuse_ptr
        argument.
        * tree-ssa-sccvn.c (last_vuse_ptr, vn_walk_kind): Move
        globals into...
        (struct vn_walk_cb_data): New callback data struct.
        (vn_reference_lookup_2): Adjust.
        (vn_reference_lookup_3): Likewise.
        (vn_reference_lookup_pieces): Likewise.
        (vn_reference_lookup): Likewise, get last_vuse_ptr argument.
        (visit_reference_op_load): Adjust.

Index: gcc/testsuite/gcc.dg/torture/pr91445.c
===================================================================
--- gcc/testsuite/gcc.dg/torture/pr91445.c      (nonexistent)
+++ gcc/testsuite/gcc.dg/torture/pr91445.c      (working copy)
@@ -0,0 +1,22 @@
+/* { dg-do run } */
+
+struct S { _Bool x; };
+
+void
+foo (struct S *s)
+{
+  __builtin_memset (s, 0x11, sizeof (struct S));
+  s->x = 1;
+}
+
+int
+main ()
+{
+  struct S s;
+  foo (&s);
+  char c;
+  __builtin_memcpy (&c, &s.x, 1);
+  if (c != 1)
+    __builtin_abort ();
+  return 0;
+}
Index: gcc/testsuite/gcc.dg/tree-ssa/pr91091-2.c
===================================================================
--- gcc/testsuite/gcc.dg/tree-ssa/pr91091-2.c   (nonexistent)
+++ gcc/testsuite/gcc.dg/tree-ssa/pr91091-2.c   (working copy)
@@ -0,0 +1,15 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-fre1" } */
+
+struct s { int x; };
+struct t { int x; };
+
+void swap(struct s* p, struct t* q)
+{
+  p->x = q->x;
+  q->x = p->x;
+}
+
+/* The second statement is redundant.  */
+/* { dg-final { scan-tree-dump-times "x = " 1 "fre1" } } */
+/* { dg-final { scan-tree-dump-times " = \[^;\]*x;" 1 "fre1" } } */
Index: gcc/tree-ssa-alias.c
===================================================================
--- gcc/tree-ssa-alias.c        (revision 274504)
+++ gcc/tree-ssa-alias.c        (working copy)
@@ -2599,8 +2599,8 @@ stmt_kills_ref_p (gimple *stmt, tree ref
 
 static bool
 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
-                 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
-                 bool abort_on_visited,
+                 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
+                 bitmap *visited, bool abort_on_visited,
                  void *(*translate)(ao_ref *, tree, void *, bool *),
                  void *data)
 {
@@ -2634,7 +2634,7 @@ maybe_skip_until (gimple *phi, tree &tar
          /* An already visited PHI node ends the walk successfully.  */
          if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
            return !abort_on_visited;
-         vuse = get_continuation_for_phi (def_stmt, ref, limit,
+         vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
                                           visited, abort_on_visited,
                                           translate, data);
          if (!vuse)
@@ -2649,7 +2649,7 @@ maybe_skip_until (gimple *phi, tree &tar
          if ((int)limit <= 0)
            return false;
          --limit;
-         if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
+         if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
            {
              bool disambiguate_only = true;
              if (translate
@@ -2681,7 +2681,7 @@ maybe_skip_until (gimple *phi, tree &tar
    Returns NULL_TREE if no suitable virtual operand can be found.  */
 
 tree
-get_continuation_for_phi (gimple *phi, ao_ref *ref,
+get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
                          unsigned int &limit, bitmap *visited,
                          bool abort_on_visited,
                          void *(*translate)(ao_ref *, tree, void *, bool *),
@@ -2724,7 +2724,8 @@ get_continuation_for_phi (gimple *phi, a
       arg1 = PHI_ARG_DEF (phi, i);
       if (arg1 == arg0)
        ;
-      else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
+      else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
+                                  limit, visited,
                                   abort_on_visited,
                                   /* Do not translate when walking over
                                      backedges.  */
@@ -2768,7 +2769,7 @@ get_continuation_for_phi (gimple *phi, a
    TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
 
 void *
-walk_non_aliased_vuses (ao_ref *ref, tree vuse,
+walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
                        void *(*walker)(ao_ref *, tree, void *),
                        void *(*translate)(ao_ref *, tree, void *, bool *),
                        tree (*valueize)(tree),
@@ -2809,7 +2810,7 @@ walk_non_aliased_vuses (ao_ref *ref, tre
       if (gimple_nop_p (def_stmt))
        break;
       else if (gimple_code (def_stmt) == GIMPLE_PHI)
-       vuse = get_continuation_for_phi (def_stmt, ref, limit,
+       vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
                                         &visited, translated, translate, data);
       else
        {
@@ -2819,7 +2820,7 @@ walk_non_aliased_vuses (ao_ref *ref, tre
              break;
            }
          --limit;
-         if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
+         if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
            {
              if (!translate)
                break;
Index: gcc/tree-ssa-alias.h
===================================================================
--- gcc/tree-ssa-alias.h        (revision 274504)
+++ gcc/tree-ssa-alias.h        (working copy)
@@ -131,11 +131,11 @@ extern bool call_may_clobber_ref_p (gcal
 extern bool call_may_clobber_ref_p_1 (gcall *, ao_ref *);
 extern bool stmt_kills_ref_p (gimple *, tree);
 extern bool stmt_kills_ref_p (gimple *, ao_ref *);
-extern tree get_continuation_for_phi (gimple *, ao_ref *,
+extern tree get_continuation_for_phi (gimple *, ao_ref *, bool,
                                      unsigned int &, bitmap *, bool,
                                      void *(*)(ao_ref *, tree, void *, bool *),
                                      void *);
-extern void *walk_non_aliased_vuses (ao_ref *, tree,
+extern void *walk_non_aliased_vuses (ao_ref *, tree, bool,
                                     void *(*)(ao_ref *, tree, void *),
                                     void *(*)(ao_ref *, tree, void *, bool *),
                                     tree (*)(tree), unsigned &, void *);
Index: gcc/tree-ssa-pre.c
===================================================================
--- gcc/tree-ssa-pre.c  (revision 274504)
+++ gcc/tree-ssa-pre.c  (working copy)
@@ -1184,8 +1184,8 @@ translate_vuse_through_block (vec<vn_ref
          bitmap visited = NULL;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
-         vuse = get_continuation_for_phi (phi, &ref, cnt, &visited, false,
-                                          NULL, NULL);
+         vuse = get_continuation_for_phi (phi, &ref, true,
+                                          cnt, &visited, false, NULL, NULL);
          if (visited)
            BITMAP_FREE (visited);
        }
Index: gcc/tree-ssa-sccvn.c
===================================================================
--- gcc/tree-ssa-sccvn.c        (revision 274504)
+++ gcc/tree-ssa-sccvn.c        (working copy)
@@ -132,8 +132,6 @@ along with GCC; see the file COPYING3.
 /* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
 #define BB_EXECUTABLE BB_VISITED
 
-static tree *last_vuse_ptr;
-static vn_lookup_kind vn_walk_kind;
 static vn_lookup_kind default_vn_walk_kind;
 
 /* vn_nary_op hashtable helpers.  */
@@ -1678,18 +1676,33 @@ vn_reference_lookup_1 (vn_reference_t vr
   return NULL_TREE;
 }
 
+struct vn_walk_cb_data
+{
+  vn_walk_cb_data (vn_reference_t vr_, tree *last_vuse_ptr_,
+                   vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
+    : vr (vr_), last_vuse_ptr (last_vuse_ptr_), vn_walk_kind (vn_walk_kind_),
+      tbaa_p (tbaa_p_)
+    {}
+
+  vn_reference_t vr;
+  tree *last_vuse_ptr;
+  vn_lookup_kind vn_walk_kind;
+  bool tbaa_p;
+};
+
 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
    with the current VUSE and performs the expression lookup.  */
 
 static void *
-vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
+vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
 {
-  vn_reference_t vr = (vn_reference_t)vr_;
+  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
+  vn_reference_t vr = data->vr;
   vn_reference_s **slot;
   hashval_t hash;
 
-  if (last_vuse_ptr)
-    *last_vuse_ptr = vuse;
+  if (data->last_vuse_ptr)
+    *data->last_vuse_ptr = vuse;
 
   /* Fixup vuse and hash.  */
   if (vr->vuse)
@@ -1959,10 +1972,11 @@ basic_block vn_context_bb;
    *DISAMBIGUATE_ONLY is set to true.  */
 
 static void *
-vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
+vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
                       bool *disambiguate_only)
 {
-  vn_reference_t vr = (vn_reference_t)vr_;
+  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
+  vn_reference_t vr = data->vr;
   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
   tree base = ao_ref_base (ref);
   HOST_WIDE_INT offseti, maxsizei;
@@ -1989,7 +2003,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
-             && !refs_may_alias_p_1 (ref, &lhs_ref, true))
+             && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
            {
              *disambiguate_only = true;
              return NULL;
@@ -2005,21 +2019,21 @@ vn_reference_lookup_3 (ao_ref *ref, tree
          we find a VN result with exactly the same value as the
         possible clobber.  In this case we can ignore the clobber
         and return the found value.  */
-      if (vn_walk_kind == VN_WALKREWRITE
+      if (data->vn_walk_kind == VN_WALKREWRITE
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && types_compatible_p (TREE_TYPE (lhs), vr->type)
          && ref->ref)
        {
-         tree *saved_last_vuse_ptr = last_vuse_ptr;
+         tree *saved_last_vuse_ptr = data->last_vuse_ptr;
          /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
-         last_vuse_ptr = NULL;
+         data->last_vuse_ptr = NULL;
          tree saved_vuse = vr->vuse;
          hashval_t saved_hashcode = vr->hashcode;
-         void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), vr);
+         void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
          /* Need to restore vr->vuse and vr->hashcode.  */
          vr->vuse = saved_vuse;
          vr->hashcode = saved_hashcode;
-         last_vuse_ptr = saved_last_vuse_ptr;
+         data->last_vuse_ptr = saved_last_vuse_ptr;
          if (res && res != (void *)-1)
            {
              vn_reference_t vnresult = (vn_reference_t) res;
@@ -2072,7 +2086,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree
        }
     }
 
-  if (*disambiguate_only)
+  /* If we are looking for redundant stores do not create new hashtable
+     entries from aliasing defs with made up alias-sets.  */
+  if (*disambiguate_only || !data->tbaa_p)
     return (void *)-1;
 
   /* If we cannot constrain the size of the reference we cannot
@@ -2342,7 +2358,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree
 
   /* 5) For aggregate copies translate the reference through them if
      the copy kills ref.  */
-  else if (vn_walk_kind == VN_WALKREWRITE
+  else if (data->vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
@@ -2462,7 +2478,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree
       *ref = r;
 
       /* Do not update last seen VUSE after translating.  */
-      last_vuse_ptr = NULL;
+      data->last_vuse_ptr = NULL;
 
       /* Keep looking for the adjusted *REF / VR pair.  */
       return NULL;
@@ -2470,7 +2486,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree
 
   /* 6) For memcpy copies translate the reference through them if
      the copy kills ref.  */
-  else if (vn_walk_kind == VN_WALKREWRITE
+  else if (data->vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ???  Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
@@ -2620,7 +2636,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree
       *ref = r;
 
       /* Do not update last seen VUSE after translating.  */
-      last_vuse_ptr = NULL;
+      data->last_vuse_ptr = NULL;
 
       /* Keep looking for the adjusted *REF / VR pair.  */
       return NULL;
@@ -2681,13 +2697,13 @@ vn_reference_lookup_pieces (tree vuse, a
     {
       ao_ref r;
       unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
-      vn_walk_kind = kind;
+      vn_walk_cb_data data (&vr1, NULL, kind, true);
       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
-         (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
+         (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3,
-                                                 vuse_valueize, limit, &vr1);
+                                                 vuse_valueize, limit, &data);
       gcc_checking_assert (vr1.operands == shared_lookup_references);
     }
 
@@ -2702,11 +2718,12 @@ vn_reference_lookup_pieces (tree vuse, a
    not exist in the hash table or if the result field of the structure
    was NULL..  VNRESULT will be filled in with the vn_reference_t
    stored in the hashtable if one exists.  When TBAA_P is false assume
-   we are looking up a store and treat it as having alias-set zero.  */
+   we are looking up a store and treat it as having alias-set zero.
+   *LAST_VUSE_PTR will be updated with the VUSE the value lookup succeeded.  */
 
 tree
 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
-                    vn_reference_t *vnresult, bool tbaa_p)
+                    vn_reference_t *vnresult, bool tbaa_p, tree *last_vuse_ptr)
 {
   vec<vn_reference_op_s> operands;
   struct vn_reference_s vr1;
@@ -2720,7 +2737,7 @@ vn_reference_lookup (tree op, tree vuse,
   vr1.operands = operands
     = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
   vr1.type = TREE_TYPE (op);
-  vr1.set = tbaa_p ? get_alias_set (op) : 0;
+  vr1.set = get_alias_set (op);
   vr1.hashcode = vn_reference_compute_hash (&vr1);
   if ((cst = fully_constant_vn_reference_p (&vr1)))
     return cst;
@@ -2737,14 +2754,12 @@ vn_reference_lookup (tree op, tree vuse,
          || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
                                             vr1.operands))
        ao_ref_init (&r, op);
-      if (! tbaa_p)
-       r.ref_alias_set = r.base_alias_set = 0;
-      vn_walk_kind = kind;
+      vn_walk_cb_data data (&vr1, last_vuse_ptr, kind, tbaa_p);
       wvnresult =
-       (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
+       (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3,
-                                               vuse_valueize, limit, &vr1);
+                                               vuse_valueize, limit, &data);
       gcc_checking_assert (vr1.operands == shared_lookup_references);
       if (wvnresult)
        {
@@ -4099,10 +4114,8 @@ visit_reference_op_load (tree lhs, tree
   tree result;
 
   last_vuse = gimple_vuse (stmt);
-  last_vuse_ptr = &last_vuse;
   result = vn_reference_lookup (op, gimple_vuse (stmt),
-                               default_vn_walk_kind, NULL, true);
-  last_vuse_ptr = NULL;
+                               default_vn_walk_kind, NULL, true, &last_vuse);
 
   /* We handle type-punning through unions by value-numbering based
      on offset and size of the access.  Be prepared to handle a
Index: gcc/tree-ssa-sccvn.h
===================================================================
--- gcc/tree-ssa-sccvn.h        (revision 274504)
+++ gcc/tree-ssa-sccvn.h        (working copy)
@@ -234,7 +234,8 @@ vec<vn_reference_op_s> vn_reference_oper
 tree vn_reference_lookup_pieces (tree, alias_set_type, tree,
                                 vec<vn_reference_op_s> ,
                                 vn_reference_t *, vn_lookup_kind);
-tree vn_reference_lookup (tree, tree, vn_lookup_kind, vn_reference_t *, bool);
+tree vn_reference_lookup (tree, tree, vn_lookup_kind, vn_reference_t *, bool,
+                         tree * = NULL);
 void vn_reference_lookup_call (gcall *, vn_reference_t *, vn_reference_t);
 vn_reference_t vn_reference_insert_pieces (tree, alias_set_type, tree,
                                           vec<vn_reference_op_s> ,
Index: gcc/tree-ssa-scopedtables.c
===================================================================
--- gcc/tree-ssa-scopedtables.c (revision 274504)
+++ gcc/tree-ssa-scopedtables.c (working copy)
@@ -298,7 +298,7 @@ avail_exprs_stack::lookup_avail_expr (gi
            && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
            && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
                ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
-           && walk_non_aliased_vuses (&ref, vuse2, vuse_eq, NULL, NULL,
+           && walk_non_aliased_vuses (&ref, vuse2, true, vuse_eq, NULL, NULL,
                                       limit, vuse1) != NULL))
        {
          if (insert)
