On December 22, 2015 4:24:40 PM GMT+01:00, Jakub Jelinek <ja...@redhat.com> 
wrote:
>Hi!
>
>As the testcase shows, we have some regressions in constexpr folding
>in GCC 5+.
>One issue is that fold_binary_loc attempts to swap operands of
>comparison operators only if one of them is constant; most of the
>optimizations in there are written for a constant in arg1 and are
>therefore OK, but the two checks in question have both operands
>non-constant and were only folding a.e + 1 != a.e and not
>a.e != a.e + 1.
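
For reference, a minimal illustration distilled from the testcase added at
the end of the patch; per the description above, only the first operand
order was caught by the existing X +- Y CMP X check in fold_binary_loc:

    struct A { int e[2]; };
    constexpr A a { { 0, 1 } };
    static_assert (a.e + 1 != a.e, "");   // X +- Y CMP X form
    static_assert (a.e != a.e + 1, "");   // X CMP X +- Y form, folding added here
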
>Another issue is that fold_comparison does not optimize this, even
>though it has code to do so.  For ADDR_EXPR it uses
>get_inner_reference, but for a POINTER_PLUS_EXPR whose first operand
>is an ADDR_EXPR it does not use get_inner_reference, and therefore in
>this case we get base a on one side and base a.e on the other.
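
Concretely, for a.e != a.e + 1 the two sides used to be decomposed
differently; a rough sketch, assuming a 32-bit int and e at offset 0
within A (neither is stated in the mail):

    a.e      (ADDR_EXPR):         get_inner_reference    -> base a,   bitpos 0
    a.e + 1  (POINTER_PLUS_EXPR): no get_inner_reference -> base a.e, offset 4 bytes

The bases a and a.e do not compare equal, so fold_comparison gave up; with
the patch both sides go through get_inner_reference and reduce to base a
plus a bit position (0 and 32 respectively), which lets the comparison fold.
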
>
>Fixed thusly, bootstrapped/regtested on x86_64-linux and i686-linux,
>ok for trunk?

OK.

Thanks,
Richard.

>2015-12-22  Jakub Jelinek  <ja...@redhat.com>
>
>       PR c++/67376
>       * fold-const.c (size_low_cst): Removed.
>       (fold_comparison): For POINTER_PLUS_EXPR where base is ADDR_EXPR
>       call get_inner_reference and handle INDIRECT_REF base of it.  Use
>       offset_int for computation of the bitpos.
>       (fold_binary_loc) <case EQ_EXPR, NE_EXPR>: Formatting
>       fixes for X +- Y CMP X and C - X CMP X folding.  Add X CMP X +- Y
>       and X CMP C - X folding.
>
>       * g++.dg/cpp0x/constexpr-67376.C: New test.
>
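
The C - X CMP X folds mentioned above (pre-existing, and mirrored as
X CMP C - X by this patch) rest on a parity argument: x == C - x would
require 2*x == C, and 2*x is always even, also in wrapping arithmetic
mod 2^N, so for odd C the equality can never hold; hence NE folds to true
and EQ to false.  A hypothetical one-liner, not part of the patch, just to
illustrate:

    constexpr bool always_true (int x) { return x != 7 - x; }  // 2*x == 7 has no integer solution
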
>--- gcc/fold-const.c.jj        2015-12-07 22:04:39.000000000 +0100
>+++ gcc/fold-const.c   2015-12-22 14:18:50.099415697 +0100
>@@ -8266,20 +8266,6 @@ pointer_may_wrap_p (tree base, tree offs
>   return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
> }
> 
>-/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
>-   kind INTEGER_CST.  This makes sure to properly sign-extend the
>-   constant.  */
>-
>-static HOST_WIDE_INT
>-size_low_cst (const_tree t)
>-{
>-  HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
>-  int prec = TYPE_PRECISION (TREE_TYPE (t));
>-  if (prec < HOST_BITS_PER_WIDE_INT)
>-    return sext_hwi (w, prec);
>-  return w;
>-}
>-
> /* Subroutine of fold_binary.  This routine performs all of the
>    transformations that are common to the equality/inequality
>    operators (EQ_EXPR and NE_EXPR) and the ordering operators
>@@ -8408,18 +8394,30 @@ fold_comparison (location_t loc, enum tr
>         STRIP_SIGN_NOPS (base0);
>         if (TREE_CODE (base0) == ADDR_EXPR)
>           {
>-            base0 = TREE_OPERAND (base0, 0);
>-            indirect_base0 = true;
>+            base0
>+              = get_inner_reference (TREE_OPERAND (base0, 0),
>+                                     &bitsize, &bitpos0, &offset0, &mode,
>+                                     &unsignedp, &reversep, &volatilep,
>+                                     false);
>+            if (TREE_CODE (base0) == INDIRECT_REF)
>+              base0 = TREE_OPERAND (base0, 0);
>+            else
>+              indirect_base0 = true;
>           }
>-        offset0 = TREE_OPERAND (arg0, 1);
>-        if (tree_fits_shwi_p (offset0))
>-          {
>-            HOST_WIDE_INT off = size_low_cst (offset0);
>-            if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
>-                                 * BITS_PER_UNIT)
>-                / BITS_PER_UNIT == (HOST_WIDE_INT) off)
>+        if (offset0 == NULL_TREE || integer_zerop (offset0))
>+          offset0 = TREE_OPERAND (arg0, 1);
>+        else
>+          offset0 = size_binop (PLUS_EXPR, offset0,
>+                                TREE_OPERAND (arg0, 1));
>+        if (TREE_CODE (offset0) == INTEGER_CST)
>+          {
>+            offset_int tem = wi::sext (wi::to_offset (offset0),
>+                                       TYPE_PRECISION (sizetype));
>+            tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
>+            tem += bitpos0;
>+            if (wi::fits_shwi_p (tem))
>               {
>-                bitpos0 = off * BITS_PER_UNIT;
>+                bitpos0 = tem.to_shwi ();
>                 offset0 = NULL_TREE;
>               }
>           }
>@@ -8443,18 +8441,30 @@ fold_comparison (location_t loc, enum tr
>         STRIP_SIGN_NOPS (base1);
>         if (TREE_CODE (base1) == ADDR_EXPR)
>           {
>-            base1 = TREE_OPERAND (base1, 0);
>-            indirect_base1 = true;
>+            base1
>+              = get_inner_reference (TREE_OPERAND (base1, 0),
>+                                     &bitsize, &bitpos1, &offset1, &mode,
>+                                     &unsignedp, &reversep, &volatilep,
>+                                     false);
>+            if (TREE_CODE (base1) == INDIRECT_REF)
>+              base1 = TREE_OPERAND (base1, 0);
>+            else
>+              indirect_base1 = true;
>           }
>-        offset1 = TREE_OPERAND (arg1, 1);
>-        if (tree_fits_shwi_p (offset1))
>-          {
>-            HOST_WIDE_INT off = size_low_cst (offset1);
>-            if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
>-                                 * BITS_PER_UNIT)
>-                / BITS_PER_UNIT == (HOST_WIDE_INT) off)
>+        if (offset1 == NULL_TREE || integer_zerop (offset1))
>+          offset1 = TREE_OPERAND (arg1, 1);
>+        else
>+          offset1 = size_binop (PLUS_EXPR, offset1,
>+                                TREE_OPERAND (arg1, 1));
>+        if (TREE_CODE (offset1) == INTEGER_CST)
>+          {
>+            offset_int tem = wi::sext (wi::to_offset (offset1),
>+                                       TYPE_PRECISION (sizetype));
>+            tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
>+            tem += bitpos1;
>+            if (wi::fits_shwi_p (tem))
>               {
>-                bitpos1 = off * BITS_PER_UNIT;
>+                bitpos1 = tem.to_shwi ();
>                 offset1 = NULL_TREE;
>               }
>           }
>@@ -10547,12 +10557,27 @@ fold_binary_loc (location_t loc,
>             || POINTER_TYPE_P (TREE_TYPE (arg0))))
>       {
>         tree val = TREE_OPERAND (arg0, 1);
>-        return omit_two_operands_loc (loc, type,
>-                                  fold_build2_loc (loc, code, type,
>-                                               val,
>-                                               build_int_cst (TREE_TYPE (val),
>-                                                              0)),
>-                                  TREE_OPERAND (arg0, 0), arg1);
>+        val = fold_build2_loc (loc, code, type, val,
>+                               build_int_cst (TREE_TYPE (val), 0));
>+        return omit_two_operands_loc (loc, type, val,
>+                                      TREE_OPERAND (arg0, 0), arg1);
>+      }
>+
>+      /* Transform comparisons of the form X CMP X +- Y to Y CMP 0.  */
>+      if ((TREE_CODE (arg1) == PLUS_EXPR
>+         || TREE_CODE (arg1) == POINTER_PLUS_EXPR
>+         || TREE_CODE (arg1) == MINUS_EXPR)
>+        && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
>+                                                                      0)),
>+                            arg0, 0)
>+        && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
>+            || POINTER_TYPE_P (TREE_TYPE (arg1))))
>+      {
>+        tree val = TREE_OPERAND (arg1, 1);
>+        val = fold_build2_loc (loc, code, type, val,
>+                               build_int_cst (TREE_TYPE (val), 0));
>+        return omit_two_operands_loc (loc, type, val,
>+                                      TREE_OPERAND (arg1, 0), arg0);
>       }
> 
>    /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
>@@ -10562,12 +10587,22 @@ fold_binary_loc (location_t loc,
>                                                                       1)),
>                             arg1, 0)
>         && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
>-      {
>-        return omit_two_operands_loc (loc, type,
>-                                  code == NE_EXPR
>-                                  ? boolean_true_node : boolean_false_node,
>-                                  TREE_OPERAND (arg0, 1), arg1);
>-      }
>+      return omit_two_operands_loc (loc, type,
>+                                    code == NE_EXPR
>+                                    ? boolean_true_node : boolean_false_node,
>+                                    TREE_OPERAND (arg0, 1), arg1);
>+
>+      /* Transform comparisons of the form X CMP C - X if C % 2 == 1.  */
>+      if (TREE_CODE (arg1) == MINUS_EXPR
>+        && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
>+        && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
>+                                                                      1)),
>+                            arg0, 0)
>+        && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
>+      return omit_two_operands_loc (loc, type,
>+                                    code == NE_EXPR
>+                                    ? boolean_true_node : boolean_false_node,
>+                                    TREE_OPERAND (arg1, 1), arg0);
> 
>       /* If this is an EQ or NE comparison with zero and ARG0 is
>        (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
>--- gcc/testsuite/g++.dg/cpp0x/constexpr-67376.C.jj    2015-12-22 11:31:34.937974400 +0100
>+++ gcc/testsuite/g++.dg/cpp0x/constexpr-67376.C       2015-12-22 14:21:51.678892129 +0100
>@@ -0,0 +1,17 @@
>+// PR c++/67376
>+// { dg-do compile { target c++11 } }
>+
>+struct A { int e[2]; };
>+constexpr A a { { 0, 1 } };
>+static_assert (a.e + 1 != a.e, "");
>+static_assert (a.e != a.e + 1, "");
>+static_assert (a.e + 2 != a.e, "");
>+static_assert (a.e != a.e + 2, "");
>+static_assert (a.e + 1 > a.e, "");
>+static_assert (a.e < a.e + 1, "");
>+static_assert (a.e + 2 > a.e, "");
>+static_assert (a.e < a.e + 2, "");
>+static_assert (a.e + 1 >= a.e, "");
>+static_assert (a.e <= a.e + 1, "");
>+static_assert (a.e + 2 >= a.e, "");
>+static_assert (a.e <= a.e + 2, "");
>
>       Jakub

