On Thu, 9 Jul 2015, Richard Biener wrote:

> 
> This moves more patterns that show up during bootstrap.
> 
> Bootstrap and regtest running on x86_64-unknown-linux-gnu.

Due to regressions caused I split off the fold_plusminus_mult_expr
part.

Bootstrapped and tested on x86_64-unknown-linux-gnu, committed.

Richard.

2015-07-09  Richard Biener  <rguent...@suse.de>

        * fold-const.c (distribute_bit_expr): Remove.
        (fold_binary_loc): Move simplifying (A & C1) + (B & C2)
        to (A & C1) | (B & C2), distributing (A & B) | (A & C)
        to A & (B | C) and simplifying A << C1 << C2 to ...
        * match.pd: ... patterns here.

Index: gcc/fold-const.c
===================================================================
*** gcc/fold-const.c    (revision 225657)
--- gcc/fold-const.c    (working copy)
*************** static enum tree_code compcode_to_compar
*** 117,123 ****
  static int operand_equal_for_comparison_p (tree, tree, tree);
  static int twoval_comparison_p (tree, tree *, tree *, int *);
  static tree eval_subst (location_t, tree, tree, tree, tree, tree);
- static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
  static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
  static tree optimize_bit_field_compare (location_t, enum tree_code,
--- 117,122 ----
*************** invert_truthvalue_loc (location_t loc, t
*** 3549,3610 ****
                          type, arg);
  }
  
- /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
-    operands are another bit-wise operation with a common input.  If so,
-    distribute the bit operations to save an operation and possibly two if
-    constants are involved.  For example, convert
-       (A | B) & (A | C) into A | (B & C)
-    Further simplification will occur if B and C are constants.
- 
-    If this optimization cannot be done, 0 will be returned.  */
- 
- static tree
- distribute_bit_expr (location_t loc, enum tree_code code, tree type,
-                    tree arg0, tree arg1)
- {
-   tree common;
-   tree left, right;
- 
-   if (TREE_CODE (arg0) != TREE_CODE (arg1)
-       || TREE_CODE (arg0) == code
-       || (TREE_CODE (arg0) != BIT_AND_EXPR
-         && TREE_CODE (arg0) != BIT_IOR_EXPR))
-     return 0;
- 
-   if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
-     {
-       common = TREE_OPERAND (arg0, 0);
-       left = TREE_OPERAND (arg0, 1);
-       right = TREE_OPERAND (arg1, 1);
-     }
-   else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
-     {
-       common = TREE_OPERAND (arg0, 0);
-       left = TREE_OPERAND (arg0, 1);
-       right = TREE_OPERAND (arg1, 0);
-     }
-   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
-     {
-       common = TREE_OPERAND (arg0, 1);
-       left = TREE_OPERAND (arg0, 0);
-       right = TREE_OPERAND (arg1, 1);
-     }
-   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
-     {
-       common = TREE_OPERAND (arg0, 1);
-       left = TREE_OPERAND (arg0, 0);
-       right = TREE_OPERAND (arg1, 0);
-     }
-   else
-     return 0;
- 
-   common = fold_convert_loc (loc, type, common);
-   left = fold_convert_loc (loc, type, left);
-   right = fold_convert_loc (loc, type, right);
-   return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
-                     fold_build2_loc (loc, code, type, left, right));
- }
- 
  /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
     with code CODE.  This optimization is unsafe.  */
  static tree
--- 3548,3553 ----
*************** fold_binary_loc (location_t loc,
*** 9574,9594 ****
  
        if (! FLOAT_TYPE_P (type))
        {
-         /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
-            with a constant, and the two constants have no bits in common,
-            we should treat this as a BIT_IOR_EXPR since this may produce more
-            simplifications.  */
-         if (TREE_CODE (arg0) == BIT_AND_EXPR
-             && TREE_CODE (arg1) == BIT_AND_EXPR
-             && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
-             && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
-             && wi::bit_and (TREE_OPERAND (arg0, 1),
-                             TREE_OPERAND (arg1, 1)) == 0)
-           {
-             code = BIT_IOR_EXPR;
-             goto bit_ior;
-           }
- 
          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
--- 9517,9522 ----
*************** fold_binary_loc (location_t loc,
*** 10422,10428 ****
        goto associate;
  
      case BIT_IOR_EXPR:
-     bit_ior:
        /* Canonicalize (X & C1) | C2.  */
        if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
--- 10350,10355 ----
*************** fold_binary_loc (location_t loc,
*** 10493,10502 ****
            return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
        }
  
-       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
-       if (t1 != NULL_TREE)
-       return t1;
- 
        /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
        goto bit_rotate;
--- 10420,10425 ----
*************** fold_binary_loc (location_t loc,
*** 10759,10767 ****
            }
        }
  
-       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
-       if (t1 != NULL_TREE)
-       return t1;
        /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
        if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
--- 10682,10687 ----
*************** fold_binary_loc (location_t loc,
*** 11110,11141 ****
  
        prec = element_precision (type);
  
-       /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
-       if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
-         && tree_to_uhwi (arg1) < prec
-         && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
-         && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
-       {
-         unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
-                             + tree_to_uhwi (arg1));
- 
-         /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
-            being well defined.  */
-         if (low >= prec)
-           {
-             if (code == LROTATE_EXPR || code == RROTATE_EXPR)
-               low = low % prec;
-             else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
-               return omit_one_operand_loc (loc, type, build_zero_cst (type),
-                                        TREE_OPERAND (arg0, 0));
-             else
-               low = prec - 1;
-           }
- 
-         return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
-                                 build_int_cst (TREE_TYPE (arg1), low));
-       }
- 
        /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
           into x & ((unsigned)-1 >> c) for unsigned types.  */
        if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
--- 11030,11035 ----
Index: gcc/match.pd
===================================================================
*** gcc/match.pd        (revision 225657)
--- gcc/match.pd        (working copy)
*************** (define_operator_list CBRT BUILT_IN_CBRT
*** 419,435 ****
         && tree_nop_conversion_p (type, TREE_TYPE (@1)))
     (bit_not (rop (convert @0) (convert @1))))))
  
! /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
     with a constant, and the two constants have no bits in common,
     we should treat this as a BIT_IOR_EXPR since this may produce more
     simplifications.  */
! (simplify
!  (bit_xor (convert1? (bit_and@4 @0 INTEGER_CST@1))
!           (convert2? (bit_and@5 @2 INTEGER_CST@3)))
!  (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
!       && tree_nop_conversion_p (type, TREE_TYPE (@2))
!       && wi::bit_and (@1, @3) == 0)
!   (bit_ior (convert @4) (convert @5))))
  
  /* (X | Y) ^ X -> Y & ~ X*/
  (simplify
--- 419,436 ----
         && tree_nop_conversion_p (type, TREE_TYPE (@1)))
     (bit_not (rop (convert @0) (convert @1))))))
  
! /* If we are XORing or adding two BIT_AND_EXPR's, both of which are and'ing
     with a constant, and the two constants have no bits in common,
     we should treat this as a BIT_IOR_EXPR since this may produce more
     simplifications.  */
! (for op (bit_xor plus)
!  (simplify
!   (op (convert1? (bit_and@4 @0 INTEGER_CST@1))
!       (convert2? (bit_and@5 @2 INTEGER_CST@3)))
!   (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
!        && tree_nop_conversion_p (type, TREE_TYPE (@2))
!        && wi::bit_and (@1, @3) == 0)
!    (bit_ior (convert @4) (convert @5)))))
  
  /* (X | Y) ^ X -> Y & ~ X*/
  (simplify
*************** (define_operator_list CBRT BUILT_IN_CBRT
*** 455,460 ****
--- 456,474 ----
   (bit_xor:c (bit_and:c @0 @1) @1)
   (bit_and (bit_not @0) @1))
  
+ /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
+    operands are another bit-wise operation with a common input.  If so,
+    distribute the bit operations to save an operation and possibly two if
+    constants are involved.  For example, convert
+      (A | B) & (A | C) into A | (B & C)
+    Further simplification will occur if B and C are constants.  */
+ (for op (bit_and bit_ior)
+      rop (bit_ior bit_and)
+  (simplify
+   (op (convert? (rop:c @0 @1)) (convert? (rop @0 @2)))
+   (if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
+    (rop (convert @0) (op (convert @1) (convert @2))))))
+ 
  
  (simplify
   (abs (abs@1 @0))
*************** (define_operator_list CBRT BUILT_IN_CBRT
*** 880,885 ****
--- 894,920 ----
                            build_int_cst (TREE_TYPE (@1),
                                           element_precision (type)), @1); }))
  
+ /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
+ (for op (lrotate rrotate rshift lshift)
+  (simplify
+   (op (op @0 INTEGER_CST@1) INTEGER_CST@2)
+   (with { unsigned int prec = element_precision (type); }
+    (if (wi::ge_p (@1, 0, TYPE_SIGN (TREE_TYPE (@1)))
+         && wi::lt_p (@1, prec, TYPE_SIGN (TREE_TYPE (@1)))
+         && wi::ge_p (@2, 0, TYPE_SIGN (TREE_TYPE (@2)))
+       && wi::lt_p (@2, prec, TYPE_SIGN (TREE_TYPE (@2))))
+     (with { unsigned int low = wi::add (@1, @2).to_uhwi (); }
+      /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
+         being well defined.  */
+      (if (low >= prec)
+       (if (op == LROTATE_EXPR || op == RROTATE_EXPR)
+        (op @0 { build_int_cst (TREE_TYPE (@1), low % prec); }))
+       (if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
+        { build_zero_cst (type); })
+       (op @0 { build_int_cst (TREE_TYPE (@1), prec - 1); }))
+      (op @0 { build_int_cst (TREE_TYPE (@1), low); }))))))
+ 
+ 
  /* ((1 << A) & 1) != 0 -> A == 0
     ((1 << A) & 1) == 0 -> A != 0 */
  (for cmp (ne eq)

Reply via email to