Hi, This patch improves the expansion of COND_EXPR into RTL by directly using conditional moves. I had to fix a bug in the x86 backend where emit_conditional_move could crash: we had a comparison mode of DImode, which is not handled by the 32-bit part, yet can_conditionally_move_p returned true because the other operands had SImode. Note that other targets might need a fix similar to the x86 one, but I could not test those targets; this is really the first time emit_conditional_move is called with a comparison mode different from the mode of the other operands where the comparison mode is not of the CC class.
The main reason to do this conversion early, rather than waiting for if-conversion (ifcvt), is that the resulting code is slightly better. The compiler is also slightly faster. OK? Bootstrapped and tested on both mips64-linux-gnu (where it was originally written) and x86_64-linux-gnu. Thanks, Andrew Pinski ChangeLog: * expr.c (convert_tree_comp_to_rtx): New function. (expand_expr_real_2): Try using conditional moves for COND_EXPRs if they exist. * config/i386/i386.c (ix86_expand_int_movcc): Disallow comparison modes of DImode for 32bits and TImode.
Index: expr.c =================================================================== --- expr.c (revision 186954) +++ expr.c (working copy) @@ -7344,6 +7344,64 @@ highest_pow2_factor_for_target (const_tr return MAX (factor, talign); } +/* Convert the tree comparision code TCODE to the rtl one where the + signedness is UNSIGNEDP. */ + +static enum rtx_code +convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp) +{ + enum rtx_code code; + switch (tcode) + { + case EQ_EXPR: + code = EQ; + break; + case NE_EXPR: + code = NE; + break; + case LT_EXPR: + code = unsignedp ? LTU : LT; + break; + case LE_EXPR: + code = unsignedp ? LEU : LE; + break; + case GT_EXPR: + code = unsignedp ? GTU : GT; + break; + case GE_EXPR: + code = unsignedp ? GEU : GE; + break; + case UNORDERED_EXPR: + code = UNORDERED; + break; + case ORDERED_EXPR: + code = ORDERED; + break; + case UNLT_EXPR: + code = UNLT; + break; + case UNLE_EXPR: + code = UNLE; + break; + case UNGT_EXPR: + code = UNGT; + break; + case UNGE_EXPR: + code = UNGE; + break; + case UNEQ_EXPR: + code = UNEQ; + break; + case LTGT_EXPR: + code = LTGT; + break; + + default: + gcc_unreachable (); + } + return code; +} + /* Subroutine of expand_expr. Expand the two operands of a binary expression EXP0 and EXP1 placing the results in OP0 and OP1. The value may be stored in TARGET if TARGET is nonzero. The @@ -8851,8 +8909,7 @@ expand_expr_real_2 (sepops ops, rtx targ && safe_from_p (original_target, treeop0, 1) && GET_MODE (original_target) == mode #ifdef HAVE_conditional_move - && (! 
can_conditionally_move_p (mode) - || REG_P (original_target)) + && 0 #endif && !MEM_P (original_target)) temp = original_target; @@ -8860,6 +8917,82 @@ expand_expr_real_2 (sepops ops, rtx targ temp = assign_temp (type, 0, 0, 1); do_pending_stack_adjust (); +#if HAVE_conditional_move + if (!can_conditionally_move_p (mode)) + mode = promote_mode (type, mode, &unsignedp); + if (can_conditionally_move_p (mode)) + { + rtx insn; + rtx op00, op01; + enum rtx_code comparison_code; + enum machine_mode comparison_mode; + start_sequence (); + + expand_operands (treeop1, treeop2, + temp, &op1, &op2, EXPAND_NORMAL); + if (TREE_CODE (treeop0) == SSA_NAME) + { + gimple srcstmt; + srcstmt = get_gimple_for_ssa_name (treeop0); + if (srcstmt + && TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) + == tcc_comparison) + { + tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt)); + op00 = expand_normal (gimple_assign_rhs1 (srcstmt)); + op01 = expand_normal (gimple_assign_rhs2 (srcstmt)); + comparison_code = convert_tree_comp_to_rtx (gimple_assign_rhs_code (srcstmt), TYPE_UNSIGNED (type)); + comparison_mode = TYPE_MODE (type); + unsignedp = TYPE_UNSIGNED (type); + } + else + goto non_comparison_cond_expr; + } + else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison) + { + tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0)); + op00 = expand_normal (TREE_OPERAND (treeop0, 0)); + op01 = expand_normal (TREE_OPERAND (treeop0, 1)); + comparison_code = convert_tree_comp_to_rtx (TREE_CODE (treeop0), TYPE_UNSIGNED (type)); + comparison_mode = TYPE_MODE (type); + unsignedp = TYPE_UNSIGNED (type); + } + else + { +non_comparison_cond_expr: + op00 = expand_normal (treeop0); + op01 = const0_rtx; + comparison_code = NE; + comparison_mode = TYPE_MODE (TREE_TYPE (treeop0)); + } + + if (GET_MODE (op1) != mode) + op1 = gen_lowpart (mode, op1); + + if (GET_MODE (op2) != mode) + op2 = gen_lowpart (mode, op2); + + /* Try to emit the conditional move. 
*/ + insn = emit_conditional_move (temp, comparison_code, + op00, op01, comparison_mode, + op1, op2, mode, + unsignedp); + + /* If we could do the conditional move, emit the sequence, + and return. */ + if (insn) + { + rtx seq = get_insns (); + end_sequence (); + emit_insn (seq); + return temp; + } + + /* Otherwise discard the sequence and fall back to code with + branches. */ + end_sequence (); + } +#endif NO_DEFER_POP; op0 = gen_label_rtx (); op1 = gen_label_rtx (); Index: config/i386/i386.c =================================================================== --- config/i386/i386.c (revision 186954) +++ config/i386/i386.c (working copy) @@ -18806,6 +18806,11 @@ ix86_expand_int_movcc (rtx operands[]) rtx op0 = XEXP (operands[1], 0); rtx op1 = XEXP (operands[1], 1); + if (GET_MODE (op0) == TImode + || (GET_MODE (op0) == DImode + && !TARGET_64BIT)) + return false; + start_sequence (); compare_op = ix86_expand_compare (code, op0, op1); compare_seq = get_insns ();