The following passes down whether a stmt is always executed from infer_loop_bounds_from_undefined to infer_loop_bounds_from_array. The parameters were already documented. The patch doesn't remove possibly redundant checks from idx_infer_loop_bounds yet.
Bootstrapped on x86_64-unknown-linux-gnu, testing in progress. * tree-ssa-loop-niter.cc (ilb_data::reliable): New. (idx_infer_loop_bounds): Initialize upper from reliable. (infer_loop_bounds_from_ref): Get and pass through reliable flag. (infer_loop_bounds_from_array): Likewise. (infer_loop_bounds_from_undefined): Pass reliable flag to infer_loop_bounds_from_array. --- gcc/tree-ssa-loop-niter.cc | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/gcc/tree-ssa-loop-niter.cc b/gcc/tree-ssa-loop-niter.cc index c6d010f6d89..0a77c1bb544 100644 --- a/gcc/tree-ssa-loop-niter.cc +++ b/gcc/tree-ssa-loop-niter.cc @@ -4123,6 +4123,7 @@ struct ilb_data { class loop *loop; gimple *stmt; + bool reliable; }; static bool @@ -4131,7 +4132,7 @@ idx_infer_loop_bounds (tree base, tree *idx, void *dta) struct ilb_data *data = (struct ilb_data *) dta; tree ev, init, step; tree low, high, type, next; - bool sign, upper = true, has_flexible_size = false; + bool sign, upper = data->reliable, has_flexible_size = false; class loop *loop = data->loop; if (TREE_CODE (base) != ARRAY_REF) @@ -4224,12 +4225,14 @@ idx_infer_loop_bounds (tree base, tree *idx, void *dta) STMT is guaranteed to be executed in every iteration of LOOP.*/ static void -infer_loop_bounds_from_ref (class loop *loop, gimple *stmt, tree ref) +infer_loop_bounds_from_ref (class loop *loop, gimple *stmt, tree ref, + bool reliable) { struct ilb_data data; data.loop = loop; data.stmt = stmt; + data.reliable = reliable; for_each_index (&ref, idx_infer_loop_bounds, &data); } @@ -4238,7 +4241,7 @@ infer_loop_bounds_from_ref (class loop *loop, gimple *stmt, tree ref) executed in every iteration of LOOP. 
*/ static void -infer_loop_bounds_from_array (class loop *loop, gimple *stmt) +infer_loop_bounds_from_array (class loop *loop, gimple *stmt, bool reliable) { if (is_gimple_assign (stmt)) { @@ -4248,10 +4251,10 @@ infer_loop_bounds_from_array (class loop *loop, gimple *stmt) /* For each memory access, analyze its access function and record a bound on the loop iteration domain. */ if (REFERENCE_CLASS_P (op0)) - infer_loop_bounds_from_ref (loop, stmt, op0); + infer_loop_bounds_from_ref (loop, stmt, op0, reliable); if (REFERENCE_CLASS_P (op1)) - infer_loop_bounds_from_ref (loop, stmt, op1); + infer_loop_bounds_from_ref (loop, stmt, op1, reliable); } else if (is_gimple_call (stmt)) { @@ -4260,13 +4263,13 @@ infer_loop_bounds_from_array (class loop *loop, gimple *stmt) lhs = gimple_call_lhs (stmt); if (lhs && REFERENCE_CLASS_P (lhs)) - infer_loop_bounds_from_ref (loop, stmt, lhs); + infer_loop_bounds_from_ref (loop, stmt, lhs, reliable); for (i = 0; i < n; i++) { arg = gimple_call_arg (stmt, i); if (REFERENCE_CLASS_P (arg)) - infer_loop_bounds_from_ref (loop, stmt, arg); + infer_loop_bounds_from_ref (loop, stmt, arg, reliable); } } } @@ -4410,7 +4413,7 @@ infer_loop_bounds_from_undefined (class loop *loop, basic_block *bbs) { gimple *stmt = gsi_stmt (bsi); - infer_loop_bounds_from_array (loop, stmt); + infer_loop_bounds_from_array (loop, stmt, reliable); if (reliable) { -- 2.35.3