The following patch avoids doing tail-merging work when SCCVN is not
run from PRE.  It also avoids discarding the shared_lookup_references
vector and avoids re-allocating the reference ops when visiting calls.
Finally it hides SCCVN APIs that are not used outside of it.
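
For reference, the new call-lookup convention looks like this minimal
sketch (names and the vn_reference_lookup_call signature are taken from
the patch; the types are GCC-internal, so the snippet is not
standalone-compilable):

  /* The caller provides scratch storage for the hashtable lookup in
     REF1, so no per-call reference-ops vector has to be built for
     the lookup.  */
  vn_reference_t ref;
  struct vn_reference_s ref1;

  if (!gimple_call_internal_p (stmt))
    {
      vn_reference_lookup_call (stmt, &ref, &ref1);
      if (ref)
        {
          /* Found an existing value-number entry for the call;
             re-use its result.  */
        }
    }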

I'd like to get rid of the tail-merging / SCCVN interaction for
GCC 5 (now that PRE value-replaces, we re-use the value-numbering
result in most cases - apart from the code-hoisting case, which PRE
doesn't perform).

Bootstrapped on x86_64-unknown-linux-gnu, testing in progress.

Richard.

2014-09-01  Richard Biener  <rguent...@suse.de>

        * tree-ssa-sccvn.h (copy_reference_ops_from_ref,
        copy_reference_ops_from_call, vn_nary_op_compute_hash,
        vn_reference_compute_hash, vn_reference_insert): Remove.
        (vn_reference_lookup_call): New function.
        * tree-ssa-sccvn.c (vn_reference_compute_hash,
        copy_reference_ops_from_ref, copy_reference_ops_from_call,
        vn_reference_insert, vn_nary_op_compute_hash): Make static.
        (create_reference_ops_from_call): Remove.
        (vn_reference_lookup_3): Properly update shared_lookup_references.
        (vn_reference_lookup_pieces): Assert that we updated
        shared_lookup_references properly.
        (vn_reference_lookup): Likewise.
        (vn_reference_lookup_call): New function.
        (visit_reference_op_call): Use it.  Avoid re-building the
        reference ops.
        (visit_reference_op_load): Remove redundant lookup.
        (visit_reference_op_store): Perform special tail-merging work
        only when possibly doing tail-merging.
        (visit_use): Likewise.
        * tree-ssa-pre.c (compute_avail): Use vn_reference_lookup_call.

Index: trunk/gcc/tree-ssa-pre.c
===================================================================
*** trunk.orig/gcc/tree-ssa-pre.c       2014-08-29 11:33:16.955047283 +0200
--- trunk/gcc/tree-ssa-pre.c    2014-09-01 10:16:57.538516963 +0200
*************** compute_avail (void)
*** 3789,3805 ****
            case GIMPLE_CALL:
              {
                vn_reference_t ref;
                pre_expr result = NULL;
-               auto_vec<vn_reference_op_s> ops;
  
                /* We can value number only calls to real functions.  */
                if (gimple_call_internal_p (stmt))
                  continue;
  
!               copy_reference_ops_from_call (stmt, &ops);
!               vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
!                                           gimple_expr_type (stmt),
!                                           ops, &ref, VN_NOWALK);
                if (!ref)
                  continue;
  
--- 3789,3802 ----
            case GIMPLE_CALL:
              {
                vn_reference_t ref;
+               vn_reference_s ref1;
                pre_expr result = NULL;
  
                /* We can value number only calls to real functions.  */
                if (gimple_call_internal_p (stmt))
                  continue;
  
!               vn_reference_lookup_call (stmt, &ref, &ref1);
                if (!ref)
                  continue;
  
Index: trunk/gcc/tree-ssa-sccvn.c
===================================================================
*** trunk.orig/gcc/tree-ssa-sccvn.c     2014-08-08 11:30:38.971977411 +0200
--- trunk/gcc/tree-ssa-sccvn.c  2014-09-01 11:19:42.960257718 +0200
*************** vn_reference_op_compute_hash (const vn_r
*** 619,625 ****
  
  /* Compute a hash for the reference operation VR1 and return it.  */
  
! hashval_t
  vn_reference_compute_hash (const vn_reference_t vr1)
  {
    inchash::hash hstate;
--- 619,625 ----
  
  /* Compute a hash for the reference operation VR1 and return it.  */
  
! static hashval_t
  vn_reference_compute_hash (const vn_reference_t vr1)
  {
    inchash::hash hstate;
*************** vn_reference_eq (const_vn_reference_t co
*** 767,773 ****
  /* Copy the operations present in load/store REF into RESULT, a vector of
     vn_reference_op_s's.  */
  
! void
  copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
  {
    if (TREE_CODE (ref) == TARGET_MEM_REF)
--- 767,773 ----
  /* Copy the operations present in load/store REF into RESULT, a vector of
     vn_reference_op_s's.  */
  
! static void
  copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
  {
    if (TREE_CODE (ref) == TARGET_MEM_REF)
*************** ao_ref_init_from_vn_reference (ao_ref *r
*** 1135,1141 ****
  /* Copy the operations present in load/store/call REF into RESULT, a vector of
     vn_reference_op_s's.  */
  
! void
  copy_reference_ops_from_call (gimple call,
                              vec<vn_reference_op_s> *result)
  {
--- 1135,1141 ----
  /* Copy the operations present in load/store/call REF into RESULT, a vector of
     vn_reference_op_s's.  */
  
! static void
  copy_reference_ops_from_call (gimple call,
                              vec<vn_reference_op_s> *result)
  {
*************** copy_reference_ops_from_call (gimple cal
*** 1177,1194 ****
      }
  }
  
- /* Create a vector of vn_reference_op_s structures from CALL, a
-    call statement.  The vector is not shared.  */
- 
- static vec<vn_reference_op_s> 
- create_reference_ops_from_call (gimple call)
- {
-   vec<vn_reference_op_s> result = vNULL;
- 
-   copy_reference_ops_from_call (call, &result);
-   return result;
- }
- 
  /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
     *I_P to point to the last element of the replacement.  */
  void
--- 1177,1182 ----
*************** vn_reference_lookup_3 (ao_ref *ref, tree
*** 1904,1922 ****
        /* Now re-write REF to be based on the rhs of the assignment.  */
        copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
        /* We need to pre-pend vr->operands[0..i] to rhs.  */
        if (i + 1 + rhs.length () > vr->operands.length ())
        {
-         vec<vn_reference_op_s> old = vr->operands;
          vr->operands.safe_grow (i + 1 + rhs.length ());
!         if (old == shared_lookup_references
!             && vr->operands != old)
!           shared_lookup_references = vNULL;
        }
        else
        vr->operands.truncate (i + 1 + rhs.length ());
        FOR_EACH_VEC_ELT (rhs, j, vro)
        vr->operands[i + 1 + j] = *vro;
        vr->operands = valueize_refs (vr->operands);
        vr->hashcode = vn_reference_compute_hash (vr);
  
        /* Adjust *ref from the new operands.  */
--- 1892,1911 ----
        /* Now re-write REF to be based on the rhs of the assignment.  */
        copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
        /* We need to pre-pend vr->operands[0..i] to rhs.  */
+       vec<vn_reference_op_s> old = vr->operands;
        if (i + 1 + rhs.length () > vr->operands.length ())
        {
          vr->operands.safe_grow (i + 1 + rhs.length ());
!         if (old == shared_lookup_references)
!           shared_lookup_references = vr->operands;
        }
        else
        vr->operands.truncate (i + 1 + rhs.length ());
        FOR_EACH_VEC_ELT (rhs, j, vro)
        vr->operands[i + 1 + j] = *vro;
        vr->operands = valueize_refs (vr->operands);
+       if (old == shared_lookup_references)
+       shared_lookup_references = vr->operands;
        vr->hashcode = vn_reference_compute_hash (vr);
  
        /* Adjust *ref from the new operands.  */
*************** vn_reference_lookup_3 (ao_ref *ref, tree
*** 2041,2047 ****
          vr->operands.safe_grow_cleared (2);
          if (old == shared_lookup_references
              && vr->operands != old)
!           shared_lookup_references.create (0);
        }
        else
        vr->operands.truncate (2);
--- 2030,2036 ----
          vr->operands.safe_grow_cleared (2);
          if (old == shared_lookup_references
              && vr->operands != old)
!           shared_lookup_references = vr->operands;
        }
        else
        vr->operands.truncate (2);
*************** vn_reference_lookup_pieces (tree vuse, a
*** 2124,2131 ****
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
!       if (vr1.operands != operands)
!       vr1.operands.release ();
      }
  
    if (*vnresult)
--- 2113,2119 ----
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
!       gcc_checking_assert (vr1.operands == shared_lookup_references);
      }
  
    if (*vnresult)
*************** vn_reference_lookup (tree op, tree vuse,
*** 2177,2184 ****
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
!       if (vr1.operands != operands)
!       vr1.operands.release ();
        if (wvnresult)
        {
          if (vnresult)
--- 2165,2171 ----
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
!       gcc_checking_assert (vr1.operands == shared_lookup_references);
        if (wvnresult)
        {
          if (vnresult)
*************** vn_reference_lookup (tree op, tree vuse,
*** 2192,2202 ****
    return vn_reference_lookup_1 (&vr1, vnresult);
  }
  
  
  /* Insert OP into the current hash table with a value number of
     RESULT, and return the resulting reference structure we created.  */
  
! vn_reference_t
  vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
  {
    vn_reference_s **slot;
--- 2179,2205 ----
    return vn_reference_lookup_1 (&vr1, vnresult);
  }
  
+ /* Lookup CALL in the current hash table and return the entry in
+    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
+ 
+ void
+ vn_reference_lookup_call (gimple call, vn_reference_t *vnresult,
+                         vn_reference_t vr)
+ {
+   tree vuse = gimple_vuse (call);
+ 
+   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
+   vr->operands = valueize_shared_reference_ops_from_call (call);
+   vr->type = gimple_expr_type (call);
+   vr->set = 0;
+   vr->hashcode = vn_reference_compute_hash (vr);
+   vn_reference_lookup_1 (vr, vnresult);
+ }
  
  /* Insert OP into the current hash table with a value number of
     RESULT, and return the resulting reference structure we created.  */
  
! static vn_reference_t
  vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
  {
    vn_reference_s **slot;
*************** vn_reference_insert_pieces (tree vuse, a
*** 2275,2281 ****
  
  /* Compute and return the hash value for nary operation VBO1.  */
  
! hashval_t
  vn_nary_op_compute_hash (const vn_nary_op_t vno1)
  {
    inchash::hash hstate;
--- 2278,2284 ----
  
  /* Compute and return the hash value for nary operation VBO1.  */
  
! static hashval_t
  vn_nary_op_compute_hash (const vn_nary_op_t vno1)
  {
    inchash::hash hstate;
*************** visit_reference_op_call (tree lhs, gimpl
*** 2885,2904 ****
    bool changed = false;
    struct vn_reference_s vr1;
    vn_reference_t vnresult = NULL;
-   tree vuse = gimple_vuse (stmt);
    tree vdef = gimple_vdef (stmt);
  
    /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
    if (lhs && TREE_CODE (lhs) != SSA_NAME)
      lhs = NULL_TREE;
  
!   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
!   vr1.operands = valueize_shared_reference_ops_from_call (stmt);
!   vr1.type = gimple_expr_type (stmt);
!   vr1.set = 0;
!   vr1.hashcode = vn_reference_compute_hash (&vr1);
!   vn_reference_lookup_1 (&vr1, &vnresult);
! 
    if (vnresult)
      {
        if (vnresult->result_vdef && vdef)
--- 2888,2900 ----
    bool changed = false;
    struct vn_reference_s vr1;
    vn_reference_t vnresult = NULL;
    tree vdef = gimple_vdef (stmt);
  
    /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
    if (lhs && TREE_CODE (lhs) != SSA_NAME)
      lhs = NULL_TREE;
  
!   vn_reference_lookup_call (stmt, &vnresult, &vr1);
    if (vnresult)
      {
        if (vnresult->result_vdef && vdef)
*************** visit_reference_op_call (tree lhs, gimpl
*** 2917,2931 ****
      }
    else
      {
-       vn_reference_s **slot;
        vn_reference_t vr2;
        if (vdef)
        changed |= set_ssa_val_to (vdef, vdef);
        if (lhs)
        changed |= set_ssa_val_to (lhs, lhs);
        vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
        vr2->vuse = vr1.vuse;
!       vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
        vr2->type = vr1.type;
        vr2->set = vr1.set;
        vr2->hashcode = vr1.hashcode;
--- 2913,2930 ----
      }
    else
      {
        vn_reference_t vr2;
+       vn_reference_s **slot;
        if (vdef)
        changed |= set_ssa_val_to (vdef, vdef);
        if (lhs)
        changed |= set_ssa_val_to (lhs, lhs);
        vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
        vr2->vuse = vr1.vuse;
!       /* As we are not walking the virtual operand chain we know the
!        shared_lookup_references are still original so we can re-use
!        them here.  */
!       vr2->operands = vr1.operands.copy ();
        vr2->type = vr1.type;
        vr2->set = vr1.set;
        vr2->hashcode = vr1.hashcode;
*************** visit_reference_op_call (tree lhs, gimpl
*** 2933,2940 ****
        vr2->result_vdef = vdef;
        slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
                                                            INSERT);
!       if (*slot)
!       free_reference (*slot);
        *slot = vr2;
      }
  
--- 2932,2938 ----
        vr2->result_vdef = vdef;
        slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
                                                            INSERT);
!       gcc_assert (!*slot);
        *slot = vr2;
      }
  
*************** visit_reference_op_load (tree lhs, tree
*** 2957,2968 ****
                                default_vn_walk_kind, NULL);
    last_vuse_ptr = NULL;
  
-   /* If we have a VCE, try looking up its operand as it might be stored in
-      a different type.  */
-   if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
-     result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
-                                 default_vn_walk_kind, NULL);
- 
    /* We handle type-punning through unions by value-numbering based
       on offset and size of the access.  Be prepared to handle a
       type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
--- 2955,2960 ----
*************** visit_reference_op_store (tree lhs, tree
*** 3087,3093 ****
        resultsame = expressions_equal_p (result, op);
      }
  
!   if (!result || !resultsame)
      {
        assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
        vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
--- 3079,3088 ----
        resultsame = expressions_equal_p (result, op);
      }
  
!   if ((!result || !resultsame)
!       /* Only perform the following when being called from PRE
!        which embeds tail merging.  */
!       && default_vn_walk_kind == VN_WALK)
      {
        assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
        vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
*************** visit_reference_op_store (tree lhs, tree
*** 3121,3128 ****
          || is_gimple_reg (op))
          vn_reference_insert (lhs, op, vdef, NULL);
  
!       assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
!       vn_reference_insert (assign, lhs, vuse, vdef);
      }
    else
      {
--- 3116,3128 ----
          || is_gimple_reg (op))
          vn_reference_insert (lhs, op, vdef, NULL);
  
!       /* Only perform the following when being called from PRE
!        which embeds tail merging.  */
!       if (default_vn_walk_kind == VN_WALK)
!       {
!         assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
!         vn_reference_insert (assign, lhs, vuse, vdef);
!       }
      }
    else
      {
*************** visit_use (tree use)
*** 3703,3709 ****
                         not alias with anything else.  In which case the
                         information that the values are distinct are encoded
                         in the IL.  */
!                     && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
            changed = visit_reference_op_call (lhs, stmt);
          else
            changed = defs_to_varying (stmt);
--- 3703,3712 ----
                         not alias with anything else.  In which case the
                         information that the values are distinct are encoded
                         in the IL.  */
!                     && !(gimple_call_return_flags (stmt) & ERF_NOALIAS)
!                     /* Only perform the following when being called from PRE
!                        which embeds tail merging.  */
!                     && default_vn_walk_kind == VN_WALK)))
            changed = visit_reference_op_call (lhs, stmt);
          else
            changed = defs_to_varying (stmt);
Index: trunk/gcc/tree-ssa-sccvn.h
===================================================================
*** trunk.orig/gcc/tree-ssa-sccvn.h     2014-08-01 14:36:49.882848057 +0200
--- trunk/gcc/tree-ssa-sccvn.h  2014-09-01 10:21:42.870497318 +0200
*************** vn_nary_op_t vn_nary_op_insert_pieces (u
*** 204,227 ****
                                       tree, tree *, tree, unsigned int);
  void vn_reference_fold_indirect (vec<vn_reference_op_s> *,
                                 unsigned int *);
- void copy_reference_ops_from_ref (tree, vec<vn_reference_op_s> *);
- void copy_reference_ops_from_call (gimple, vec<vn_reference_op_s> *);
  bool ao_ref_init_from_vn_reference (ao_ref *, alias_set_type, tree,
                                    vec<vn_reference_op_s> );
  tree vn_reference_lookup_pieces (tree, alias_set_type, tree,
                                 vec<vn_reference_op_s> ,
                                 vn_reference_t *, vn_lookup_kind);
  tree vn_reference_lookup (tree, tree, vn_lookup_kind, vn_reference_t *);
! vn_reference_t vn_reference_insert (tree, tree, tree, tree);
  vn_reference_t vn_reference_insert_pieces (tree, alias_set_type, tree,
                                           vec<vn_reference_op_s> ,
                                           tree, unsigned int);
  
- hashval_t vn_nary_op_compute_hash (const vn_nary_op_t);
  bool vn_nary_op_eq (const_vn_nary_op_t const vno1,
                    const_vn_nary_op_t const vno2);
  bool vn_nary_may_trap (vn_nary_op_t);
- hashval_t vn_reference_compute_hash (const vn_reference_t);
  bool vn_reference_eq (const_vn_reference_t const, const_vn_reference_t const);
  unsigned int get_max_value_id (void);
  unsigned int get_next_value_id (void);
--- 204,223 ----
                                       tree, tree *, tree, unsigned int);
  void vn_reference_fold_indirect (vec<vn_reference_op_s> *,
                                 unsigned int *);
  bool ao_ref_init_from_vn_reference (ao_ref *, alias_set_type, tree,
                                    vec<vn_reference_op_s> );
  tree vn_reference_lookup_pieces (tree, alias_set_type, tree,
                                 vec<vn_reference_op_s> ,
                                 vn_reference_t *, vn_lookup_kind);
  tree vn_reference_lookup (tree, tree, vn_lookup_kind, vn_reference_t *);
! void vn_reference_lookup_call (gimple, vn_reference_t *, vn_reference_t);
  vn_reference_t vn_reference_insert_pieces (tree, alias_set_type, tree,
                                           vec<vn_reference_op_s> ,
                                           tree, unsigned int);
  
  bool vn_nary_op_eq (const_vn_nary_op_t const vno1,
                    const_vn_nary_op_t const vno2);
  bool vn_nary_may_trap (vn_nary_op_t);
  bool vn_reference_eq (const_vn_reference_t const, const_vn_reference_t const);
  unsigned int get_max_value_id (void);
  unsigned int get_next_value_id (void);
