Hi,
When I modified GCC to lower the majority of bitfield accesses via
BIT_FIELD_REF instead of COMPONENT_REF, SRA gets this wrong: when it
does the replacement it still tries to use the BIT_FIELD_REF, but it
never places the old value back in the struct, so the BIT_FIELD_REF
no longer reads or writes the correct bits.
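For illustration, the kind of (hypothetical) testcase I have in mind
looks like this; with the modified lowering, each bitfield access
becomes a BIT_FIELD_REF on the whole of x, which SRA then wants to
replace with a single scalar:

  struct s { unsigned a : 3; unsigned b : 5; };

  unsigned f (void)
  {
    struct s x;
    x.a = 2;      /* roughly BIT_FIELD_REF <x, 3, 0> = 2 */
    x.b = 7;      /* roughly BIT_FIELD_REF <x, 5, 3> = 7 */
    return x.a;   /* roughly BIT_FIELD_REF <x, 3, 0> */
  }

Once x is scalarized, the BIT_FIELD_REFs still refer to bits of the
old aggregate, which SRA never updates from the replacement variable.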
This patch fixes the problem by expanding, for both reads and writes,
what the BIT_FIELD_REF does internally.  Note that we can't use a
BIT_FIELD_REF directly on the left-hand side, since we don't support
partial writes yet (except for vector and complex types).
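In C-like pseudo-code (repl is the scalar replacement, bitpos and
bitsize come from the BIT_FIELD_REF, nbits is the precision of repl's
type; the names are only for illustration), the expansions the patch
emits are roughly:

  /* mask has bitsize one-bits starting at bit bitpos.  */
  mask = (~0u << (nbits - bitsize)) >> (nbits - bitsize - bitpos);

  /* Read: shift the field down and let the conversion to the
     BIT_FIELD_REF's type truncate it to bitsize bits.  */
  value = (fieldtype) (repl >> bitpos);

  /* Write: clear the field in repl, then OR in the shifted new value.  */
  repl = (repl & ~mask) | ((rhs << bitpos) & mask);

Everything is computed in the unsigned variant of repl's type so that
the shifts are logical.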
This is only an RFC since I don't know of a way to reproduce this bug
on the trunk.  I tested it on x86_64-linux-gnu with no regressions.
Thanks,
Andrew Pinski
ChangeLog:
	* tree-sra.c (sra_modify_expr): Handle BIT_FIELD_REF specially if we
	are replacing the whole struct with a single variable.
Index: tree-sra.c
===================================================================
--- tree-sra.c (revision 73809)
+++ tree-sra.c (working copy)
@@ -2648,6 +2648,7 @@ sra_modify_expr (tree *expr, gimple_stmt
location_t loc;
struct access *access;
tree type, bfr;
+ tree *orig_expr = expr;
if (TREE_CODE (*expr) == BIT_FIELD_REF)
{
@@ -2668,6 +2669,56 @@ sra_modify_expr (tree *expr, gimple_stmt
if (access->grp_to_be_replaced)
{
tree repl = get_access_replacement (access);
+ /* BIT_FIELD_REFs with writes have to be handled specially. */
+ if (bfr)
+ {
+ tree mask;
+ HOST_WIDE_INT nbitsize;
+ HOST_WIDE_INT lbitsize;
+ HOST_WIDE_INT lbitpos;
+ tree unsignedtype;
+ gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (repl)));
+
+ unsignedtype = signed_or_unsigned_type_for (1, TREE_TYPE (repl));
+ nbitsize = TYPE_PRECISION (TREE_TYPE (repl));
+ lbitsize = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
+ lbitpos = tree_low_cst (TREE_OPERAND (bfr, 2), 1);
+ mask = build_int_cst_type (unsignedtype, -1);
+ mask = int_const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
+ mask = int_const_binop (RSHIFT_EXPR, mask,
+ size_int (nbitsize - lbitsize - lbitpos));
+ if (write)
+ {
+ tree lhs, rhs;
+ tree notmask;
+ gimple stmt = gsi_stmt (*gsi);
+ double_int val = double_int_not (tree_to_double_int (mask));
+ notmask = force_fit_type_double (TREE_TYPE (mask), val, 0, 0);
+ /* (REPL & (~mask)) | ((RHS<<offset)&mask) */
+ lhs = fold_convert (unsignedtype, repl);
+ lhs = fold_build2_loc (loc, BIT_AND_EXPR, unsignedtype, lhs, notmask);
+ rhs = gimple_assign_rhs1 (stmt);
+ rhs = fold_convert_loc (loc, unsignedtype, rhs);
+ rhs = fold_build2_loc (loc, LSHIFT_EXPR, unsignedtype, rhs, size_int (lbitpos));
+ rhs = fold_build2_loc (loc, BIT_AND_EXPR, unsignedtype, rhs, mask);
+ rhs = fold_build2_loc (loc, BIT_IOR_EXPR, unsignedtype, lhs, rhs);
+ rhs = fold_convert_loc (loc, TREE_TYPE (repl), rhs);
+ rhs = force_gimple_operand_gsi (gsi, rhs, true, NULL_TREE,
+ true, GSI_SAME_STMT);
+ stmt = gimple_build_assign (repl, rhs);
+ gimple_set_location (stmt, loc);
+ gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
+ }
+ else
+ {
+ tree access;
+ access = fold_convert_loc (loc, unsignedtype, repl);
+ access = fold_build2_loc (loc, RSHIFT_EXPR, unsignedtype, access, size_int (lbitpos));
+ access = fold_convert_loc (loc, TREE_TYPE (bfr), access);
+ *orig_expr = force_gimple_operand_gsi (gsi, access, true, NULL_TREE,
+ true, GSI_SAME_STMT);
+ }
+ }
/* If we replace a non-register typed access simply use the original
access expression to extract the scalar component afterwards.
This happens if scalarizing a function return value or parameter
@@ -2678,7 +2729,7 @@ sra_modify_expr (tree *expr, gimple_stmt
be accessed as a different type too, potentially creating a need for
type conversion (see PR42196) and when scalarized unions are involved
in assembler statements (see PR42398). */
- if (!useless_type_conversion_p (type, access->type))
+ else if (!useless_type_conversion_p (type, access->type))
{
tree ref;