verify_expr ensured that the size and offset in gimple BIT_FIELD_REFs
satisfied tree_fits_uhwi_p.  This patch extends that so that they can
be poly_uint64s, and adds helper routines for accessing them when the
verify_expr requirements apply.


2017-10-23  Richard Sandiford  <richard.sandiford@linaro.org>
            Alan Hayward  <alan.hayward@arm.com>
            David Sherwood  <david.sherwood@arm.com>

gcc/
        * tree.h (bit_field_size, bit_field_offset): New functions.
        * hsa-gen.c (gen_hsa_addr): Use them.
        * tree-ssa-forwprop.c (simplify_bitfield_ref): Likewise.
        (simplify_vector_constructor): Likewise.
        * tree-ssa-sccvn.c (copy_reference_ops_from_ref): Likewise.
        * tree-cfg.c (verify_expr): Require the sizes and offsets of a
        BIT_FIELD_REF to be poly_uint64s rather than uhwis.
        * fold-const.c (fold_ternary_loc): Protect tree_to_uhwi with
        tree_fits_uhwi_p.

Index: gcc/tree.h
===================================================================
--- gcc/tree.h  2017-10-23 17:18:47.668056833 +0100
+++ gcc/tree.h  2017-10-23 17:20:50.884679814 +0100
@@ -4764,6 +4764,24 @@ poly_int_tree_p (const_tree t)
   return (TREE_CODE (t) == INTEGER_CST || POLY_INT_CST_P (t));
 }
 
+/* Return the bit size of BIT_FIELD_REF T, in cases where it is known
+   to be a poly_uint64.  (This is always true at the gimple level.)  */
+
+inline poly_uint64
+bit_field_size (const_tree t)
+{
+  return tree_to_poly_uint64 (TREE_OPERAND (t, 1));
+}
+
+/* Return the starting bit offset of BIT_FIELD_REF T, in cases where it is
+   known to be a poly_uint64.  (This is always true at the gimple level.)  */
+
+inline poly_uint64
+bit_field_offset (const_tree t)
+{
+  return tree_to_poly_uint64 (TREE_OPERAND (t, 2));
+}
+
 extern tree strip_float_extensions (tree);
 extern int really_constant_p (const_tree);
 extern bool ptrdiff_tree_p (const_tree, poly_int64_pod *);
Index: gcc/hsa-gen.c
===================================================================
--- gcc/hsa-gen.c       2017-10-23 17:18:47.664057184 +0100
+++ gcc/hsa-gen.c       2017-10-23 17:20:50.882679875 +0100
@@ -1959,8 +1959,8 @@ gen_hsa_addr (tree ref, hsa_bb *hbb, HOS
       goto out;
     }
   else if (TREE_CODE (ref) == BIT_FIELD_REF
-          && ((tree_to_uhwi (TREE_OPERAND (ref, 1)) % BITS_PER_UNIT) != 0
-              || (tree_to_uhwi (TREE_OPERAND (ref, 2)) % BITS_PER_UNIT) != 0))
+          && (!multiple_p (bit_field_size (ref), BITS_PER_UNIT)
+              || !multiple_p (bit_field_offset (ref), BITS_PER_UNIT)))
     {
       HSA_SORRY_ATV (EXPR_LOCATION (origref),
                     "support for HSA does not implement "
Index: gcc/tree-ssa-forwprop.c
===================================================================
--- gcc/tree-ssa-forwprop.c     2017-10-23 17:17:01.434034223 +0100
+++ gcc/tree-ssa-forwprop.c     2017-10-23 17:20:50.883679845 +0100
@@ -1727,7 +1727,7 @@ simplify_bitfield_ref (gimple_stmt_itera
   gimple *def_stmt;
   tree op, op0, op1, op2;
   tree elem_type;
-  unsigned idx, n, size;
+  unsigned idx, size;
   enum tree_code code;
 
   op = gimple_assign_rhs1 (stmt);
@@ -1762,12 +1762,11 @@ simplify_bitfield_ref (gimple_stmt_itera
     return false;
 
   size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
-  n = TREE_INT_CST_LOW (op1) / size;
-  if (n != 1)
+  if (may_ne (bit_field_size (op), size))
     return false;
-  idx = TREE_INT_CST_LOW (op2) / size;
 
-  if (code == VEC_PERM_EXPR)
+  if (code == VEC_PERM_EXPR
+      && constant_multiple_p (bit_field_offset (op), size, &idx))
     {
       tree p, m, tem;
       unsigned nelts;
@@ -2020,9 +2019,10 @@ simplify_vector_constructor (gimple_stmt
            return false;
          orig = ref;
        }
-      if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
+      unsigned int elt;
+      if (may_ne (bit_field_size (op1), elem_size)
+         || !constant_multiple_p (bit_field_offset (op1), elem_size, &elt))
        return false;
-      unsigned int elt = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
       if (elt != i)
        maybe_ident = false;
       sel.quick_push (elt);
Index: gcc/tree-ssa-sccvn.c
===================================================================
--- gcc/tree-ssa-sccvn.c        2017-10-23 17:17:01.435034088 +0100
+++ gcc/tree-ssa-sccvn.c        2017-10-23 17:20:50.884679814 +0100
@@ -766,12 +766,8 @@ copy_reference_ops_from_ref (tree ref, v
          /* Record bits, position and storage order.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
-         if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
-           {
-             HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
-             if (off % BITS_PER_UNIT == 0)
-               temp.off = off / BITS_PER_UNIT;
-           }
+         if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
+           temp.off = -1;
          temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
          break;
        case COMPONENT_REF:
Index: gcc/tree-cfg.c
===================================================================
--- gcc/tree-cfg.c      2017-10-23 17:11:40.247950952 +0100
+++ gcc/tree-cfg.c      2017-10-23 17:20:50.883679845 +0100
@@ -3054,8 +3054,9 @@ #define CHECK_OP(N, MSG) \
          tree t0 = TREE_OPERAND (t, 0);
          tree t1 = TREE_OPERAND (t, 1);
          tree t2 = TREE_OPERAND (t, 2);
-         if (!tree_fits_uhwi_p (t1)
-             || !tree_fits_uhwi_p (t2)
+         poly_uint64 size, bitpos;
+         if (!poly_int_tree_p (t1, &size)
+             || !poly_int_tree_p (t2, &bitpos)
              || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
              || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
            {
@@ -3063,8 +3064,7 @@ #define CHECK_OP(N, MSG) \
              return t;
            }
          if (INTEGRAL_TYPE_P (TREE_TYPE (t))
-             && (TYPE_PRECISION (TREE_TYPE (t))
-                 != tree_to_uhwi (t1)))
+             && may_ne (TYPE_PRECISION (TREE_TYPE (t)), size))
            {
              error ("integral result type precision does not match "
                     "field size of BIT_FIELD_REF");
@@ -3072,16 +3072,16 @@ #define CHECK_OP(N, MSG) \
            }
          else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
                   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
-                  && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
-                      != tree_to_uhwi (t1)))
+                  && may_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t))),
+                             size))
            {
              error ("mode size of non-integral result does not "
                     "match field size of BIT_FIELD_REF");
              return t;
            }
          if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
-             && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
-                 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
+             && may_gt (size + bitpos,
+                        tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (t0)))))
            {
              error ("position plus size exceeds size of referenced object in "
                     "BIT_FIELD_REF");
Index: gcc/fold-const.c
===================================================================
--- gcc/fold-const.c    2017-10-23 17:18:47.662057360 +0100
+++ gcc/fold-const.c    2017-10-23 17:20:50.881679906 +0100
@@ -11728,7 +11728,9 @@ fold_ternary_loc (location_t loc, enum t
          fold (nearly) all BIT_FIELD_REFs.  */
       if (CONSTANT_CLASS_P (arg0)
          && can_native_interpret_type_p (type)
-         && BITS_PER_UNIT == 8)
+         && BITS_PER_UNIT == 8
+         && tree_fits_uhwi_p (op1)
+         && tree_fits_uhwi_p (op2))
        {
          unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
          unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);

Reply via email to