Backports to 7.3
Hi! 5 further backports to 7 branch, bootstrapped/regtested on x86_64-linux and i686-linux, committed to branch. Jakub 2017-12-22 Jakub Jelinek Backported from mainline 2017-12-19 Jakub Jelinek PR ipa/82801 PR ipa/83346 * ipa-inline.c (flatten_remove_node_hook): New function. (ipa_inline): Keep only nodes with flatten attribute at the end of the array in the order from ipa_reverse_postorder, only walk that portion of array for flattening, if there is more than one such node, temporarily register a removal hook and ignore removed nodes. * g++.dg/ipa/pr82801.C: New test. --- gcc/ipa-inline.c(revision 255804) +++ gcc/ipa-inline.c(revision 255805) @@ -2338,6 +2338,19 @@ dump_inline_stats (void) (int) reason[i][1], reason_freq[i].to_double (), reason[i][0]); } +/* Called when node is removed. */ + +static void +flatten_remove_node_hook (struct cgraph_node *node, void *data) +{ + if (lookup_attribute ("flatten", DECL_ATTRIBUTES (node->decl)) == NULL) +return; + + hash_set <cgraph_node *> *removed + = (hash_set <cgraph_node *> *) data; + removed->add (node); +} + /* Decide on the inlining. We do so in the topological order to avoid expenses on updating data structures. */ @@ -2347,7 +2360,7 @@ ipa_inline (void) struct cgraph_node *node; int nnodes; struct cgraph_node **order; - int i; + int i, j; int cold; bool remove_functions = false; @@ -2380,26 +2393,56 @@ ipa_inline (void) if (dump_file) fprintf (dump_file, "\nFlattening functions:\n"); + /* First shrink order array, so that it only contains nodes with + flatten attribute. */ + for (i = nnodes - 1, j = i; i >= 0; i--) +{ + node = order[i]; + if (lookup_attribute ("flatten", + DECL_ATTRIBUTES (node->decl)) != NULL) + order[j--] = order[i]; +} + + /* After the above loop, order[j + 1] ... order[nnodes - 1] contain + nodes with flatten attribute. If there is more than one such + node, we need to register a node removal hook, as flatten_function + could remove other nodes with flatten attribute. See PR82801. 
*/ + struct cgraph_node_hook_list *node_removal_hook_holder = NULL; + hash_set <cgraph_node *> *flatten_removed_nodes = NULL; + if (j < nnodes - 2) +{ + flatten_removed_nodes = new hash_set <cgraph_node *>; + node_removal_hook_holder + = symtab->add_cgraph_removal_hook (&flatten_remove_node_hook, + flatten_removed_nodes); +} + /* In the first pass handle functions to be flattened. Do this with a priority so none of our later choices will make this impossible. */ - for (i = nnodes - 1; i >= 0; i--) + for (i = nnodes - 1; i > j; i--) { node = order[i]; + if (flatten_removed_nodes + && flatten_removed_nodes->contains (node)) + continue; /* Handle nodes to be flattened. Ideally when processing callees we stop inlining at the entry of cycles, possibly cloning that entry point and try to flatten itself turning it into a self-recursive function. */ - if (lookup_attribute ("flatten", - DECL_ATTRIBUTES (node->decl)) != NULL) - { - if (dump_file) - fprintf (dump_file, -"Flattening %s\n", node->name ()); - flatten_function (node, false); - } + if (dump_file) + fprintf (dump_file, "Flattening %s\n", node->name ()); + flatten_function (node, false); } + + if (j < nnodes - 2) +{ + symtab->remove_cgraph_removal_hook (node_removal_hook_holder); + delete flatten_removed_nodes; +} + free (order); + if (dump_file) dump_overall_stats (); @@ -2411,7 +2454,6 @@ ipa_inline (void) inline functions and virtual functions so we really know what is called once. */ symtab->remove_unreachable_nodes (dump_file); - free (order); /* Inline functions with a property that after inlining into all callers the code size will shrink because the out-of-line copy is eliminated. 
--- gcc/testsuite/g++.dg/ipa/pr82801.C (nonexistent) +++ gcc/testsuite/g++.dg/ipa/pr82801.C (revision 255805) @@ -0,0 +1,20 @@ +// PR ipa/82801 +// { dg-do compile } +// { dg-options "-O2 -Wno-attributes" } + +template <int N> +struct A { A () {} }; +struct B { double foo () const; }; + +__attribute__((always_inline, flatten)) +double B::foo () const +{ + A<1> v; + return 0.0; +} + +int
main () +{ + return 0; +} 2017-12-22 Jakub Jelinek Backported from mainline 2017-12-21 Jakub Jelinek PR tree-optimization/83521 * tree-ssa-phiopt.c (factor_out_conditional_conversion): Use gimple_build_assign without code on result of fold_build1 (VIEW_CONVERT_EXPR, ...), as it might not create a VIEW_CONVERT_EXPR.
Re: Backports to 7.3
On Fri, Dec 15, 2017 at 11:23:21PM +0100, Jakub Jelinek wrote: > Hi! > > I've backported today following 23 patches after bootstrapping/regtesting > them on x86_64-linux and i686-linux. And 2 more: Jakub 2017-12-16 Jakub Jelinek Backported from mainline 2017-12-15 Jakub Jelinek PR c++/81197 * cp-tree.h (cp_maybe_mangle_decomp): Declare. * decl.c (cp_maybe_mangle_decomp): New function. (cp_finish_decomp): Don't SET_DECL_ASSEMBLER_NAME here. * parser.c (cp_convert_range_for, cp_parser_decomposition_declaration): Call cp_maybe_mangle_decomp. * pt.c (tsubst_expr): Likewise. * mangle.c (find_decomp_unqualified_name): New function. (write_unqualified_name): Handle DECL_DECOMPOSITION_P where DECL_ASSEMBLER_NAME is already set. * g++.dg/cpp1z/decomp34.C: New test. --- gcc/cp/decl.c (revision 255704) +++ gcc/cp/decl.c (revision 255705) @@ -7339,6 +7339,25 @@ lookup_decomp_type (tree v) return *decomp_type_table->get (v); } +/* Mangle a decomposition declaration if needed. Arguments like + in cp_finish_decomp. */ + +void +cp_maybe_mangle_decomp (tree decl, tree first, unsigned int count) +{ + if (!processing_template_decl + && !error_operand_p (decl) + && DECL_NAMESPACE_SCOPE_P (decl)) +{ + auto_vec <tree, 16> v; + v.safe_grow (count); + tree d = first; + for (unsigned int i = 0; i < count; i++, d = DECL_CHAIN (d)) + v[count - i - 1] = d; + SET_DECL_ASSEMBLER_NAME (decl, mangle_decomp (decl, v)); +} +} + /* Finish a decomposition declaration. 
DECL is the underlying declaration "e", FIRST is the head of a chain of decls for the individual identifiers chained through DECL_CHAIN in reverse order and COUNT is the number of @@ -7612,8 +7631,6 @@ cp_finish_decomp (tree decl, tree first, DECL_HAS_VALUE_EXPR_P (v[i]) = 1; } } - else if (DECL_NAMESPACE_SCOPE_P (decl)) -SET_DECL_ASSEMBLER_NAME (decl, mangle_decomp (decl, v)); } /* Returns a declaration for a VAR_DECL as if: --- gcc/cp/pt.c (revision 255704) +++ gcc/cp/pt.c (revision 255705) @@ -16135,19 +16135,23 @@ tsubst_expr (tree t, tree args, tsubst_f if (VAR_P (decl)) const_init = (DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (pattern_decl)); - cp_finish_decl (decl, init, const_init, NULL_TREE, 0); if (VAR_P (decl) && DECL_DECOMPOSITION_P (decl) && TREE_TYPE (pattern_decl) != error_mark_node) { unsigned int cnt; tree first; - decl = tsubst_decomp_names (decl, pattern_decl, args, - complain, in_decl, &first, - &cnt); - if (decl != error_mark_node) - cp_finish_decomp (decl, first, cnt); + tree ndecl + = tsubst_decomp_names (decl, pattern_decl, args, +complain, in_decl, &first, &cnt); + if (ndecl != error_mark_node) + cp_maybe_mangle_decomp (ndecl, first, cnt); + cp_finish_decl (decl, init, const_init, NULL_TREE, 0); + if (ndecl != error_mark_node) + cp_finish_decomp (ndecl, first, cnt); } + else + cp_finish_decl (decl, init, const_init, NULL_TREE, 0); } } } --- gcc/cp/parser.c (revision 255704) +++ gcc/cp/parser.c (revision 255705) @@ -11735,6 +11735,9 @@ cp_convert_range_for (tree statement, tr tf_warning_or_error); finish_for_expr (expression, statement); + if (VAR_P (range_decl) && DECL_DECOMPOSITION_P (range_decl)) +cp_maybe_mangle_decomp (range_decl, decomp_first_name, decomp_cnt); + /* The declaration is initialized with *__begin inside the loop body. 
*/ cp_finish_decl (range_decl, build_x_indirect_ref (input_location, begin, RO_NULL, @@ -13059,6 +13062,7 @@ cp_parser_decomposition_declaration (cp_ if (decl != error_mark_node) { + cp_maybe_mangle_decomp (decl, prev, v.length ()); cp_finish_decl (decl, initializer, non_constant_p, NULL_TREE, is_direct_init ? LOOKUP_NORMAL : LOOKUP_IMPLICIT); cp_finish_decomp (decl, prev, v.length ()); --- gcc/cp/mangle.c (revision 255704) +++ gcc/cp/mangle.c (revision 255705) @@ -1247,6 +1247,51 @@ write_template_prefix (const tree node) add_substitution (substitution); } +/* As the list of identifiers for the structured
Backports to 7.3
Hi! I've backported today following 23 patches after bootstrapping/regtesting them on x86_64-linux and i686-linux. Some of the backports are just partial backports, in particular for r255133 I've just backported the removal of case BUILT_IN_STPNCPY_CHK:, for r255354 the patch didn't apply at all, because we don't have the C++ __builtin_unreachables with BUILTINS_LOCATION, so rewrote it manually, and finally r255574 is just the fix, so am not trying to extend it to reversed loops and of course it didn't apply cleanly either. Jakub 2017-12-15 Jakub JelinekBackported from mainline 2017-11-20 Jakub Jelinek PR c++/82781 * constexpr.c (cxx_eval_vector_conditional_expression): New function. (cxx_eval_constant_expression) : Use it instead of cxx_eval_conditional_expression. * g++.dg/ext/constexpr-pr82781.C: New test. --- gcc/cp/constexpr.c (revision 254951) +++ gcc/cp/constexpr.c (revision 254952) @@ -2086,6 +2086,45 @@ cxx_eval_conditional_expression (const c jump_target); } +/* Subroutine of cxx_eval_constant_expression. + Attempt to evaluate vector condition expressions. Unlike + cxx_eval_conditional_expression, VEC_COND_EXPR acts like a normal + ternary arithmetics operation, where all 3 arguments have to be + evaluated as constants and then folding computes the result from + them. 
*/ + +static tree +cxx_eval_vector_conditional_expression (const constexpr_ctx *ctx, tree t, + bool *non_constant_p, bool *overflow_p) +{ + tree arg1 = cxx_eval_constant_expression (ctx, TREE_OPERAND (t, 0), + /*lval*/false, + non_constant_p, overflow_p); + VERIFY_CONSTANT (arg1); + tree arg2 = cxx_eval_constant_expression (ctx, TREE_OPERAND (t, 1), + /*lval*/false, + non_constant_p, overflow_p); + VERIFY_CONSTANT (arg2); + tree arg3 = cxx_eval_constant_expression (ctx, TREE_OPERAND (t, 2), + /*lval*/false, + non_constant_p, overflow_p); + VERIFY_CONSTANT (arg3); + location_t loc = EXPR_LOCATION (t); + tree type = TREE_TYPE (t); + tree r = fold_ternary_loc (loc, VEC_COND_EXPR, type, arg1, arg2, arg3); + if (r == NULL_TREE) +{ + if (arg1 == TREE_OPERAND (t, 0) + && arg2 == TREE_OPERAND (t, 1) + && arg3 == TREE_OPERAND (t, 2)) + r = t; + else + r = build3_loc (loc, VEC_COND_EXPR, type, arg1, arg2, arg3); +} + VERIFY_CONSTANT (r); + return r; +} + /* Returns less than, equal to, or greater than zero if KEY is found to be less than, to match, or to be greater than the constructor_elt's INDEX. 
*/ @@ -4398,12 +4437,14 @@ cxx_eval_constant_expression (const cons jump_target); break; } - /* FALLTHRU */ -case VEC_COND_EXPR: r = cxx_eval_conditional_expression (ctx, t, lval, non_constant_p, overflow_p, jump_target); break; +case VEC_COND_EXPR: + r = cxx_eval_vector_conditional_expression (ctx, t, non_constant_p, + overflow_p); + break; case CONSTRUCTOR: if (TREE_CONSTANT (t)) --- gcc/testsuite/g++.dg/ext/constexpr-pr82781.C(nonexistent) +++ gcc/testsuite/g++.dg/ext/constexpr-pr82781.C(revision 254952) @@ -0,0 +1,12 @@ +// PR c++/82781 +// { dg-do compile { target c++11 } } + +typedef int V __attribute__ ((vector_size (16))); +constexpr V b1 = { 0, 1, 10, 20 }; +constexpr V b2 = { 0, 2, 10, 0 }; +constexpr V b3 = b1 == b2; + +static_assert (b3[0] == -1, ""); +static_assert (b3[1] == 0, ""); +static_assert (b3[2] == -1, ""); +static_assert (b3[3] == 0, ""); 2017-12-15 Jakub Jelinek Backported from mainline 2017-11-21 Jakub Jelinek PR c++/83059 * c-common.c (get_atomic_generic_size): Use TREE_INT_CST_LOW instead of tree_to_uhwi, formatting fix. * c-c++-common/pr83059.c: New test. --- gcc/c-family/c-common.c (revision 254989) +++ gcc/c-family/c-common.c (revision 254990) @@ -6671,13 +6671,14 @@ get_atomic_generic_size (location_t loc, tree p = (*params)[x]; if (TREE_CODE (p) == INTEGER_CST) { - int i = tree_to_uhwi (p); - if (i < 0 || (memmodel_base (i) >= MEMMODEL_LAST)) - { - warning_at (loc, OPT_Winvalid_memory_model, - "invalid memory model argument %d of %qE", x + 1, - function); - } + /* memmodel_base masks the low 16 bits, thus ignore any