This patch makes cstore expand all comparisons against a constant
(those that aren't already handled by a subfc;subfe sequence) as an
addi ; [n]{and,or}[c] ; sr[wd]i  sequence.  Any inversion that is
needed is expanded as a separate insn (as is an add of zero); combine
and friends will take care of cleaning all that up.
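
To illustrate (this is not part of the patch): for a signed x < c the
sequence computes x - c, combines it with x itself using "or" when
c >= 0 and "and" when c < 0, and then shifts the sign bit down; LE,
GE, and GT are reduced to LT by adjusting the constant and/or
inverting the result.  A stand-alone C sketch of that identity, for
32-bit values, with a hypothetical helper name:

#include <assert.h>
#include <stdint.h>

/* Illustration only: the signed "x < c" identity behind
   cstore<mode>4_signed_imm, shown for 32 bits.  */
static uint32_t
lt_signed_imm (int32_t x, int32_t c)
{
  uint32_t u = (uint32_t) x;
  uint32_t diff = u - (uint32_t) c;      /* addi  rT,rX,-c  */
  uint32_t bits = c < 0 ? (u & diff)     /* and             */
                        : (u | diff);    /* or              */
  return bits >> 31;                     /* srwi  rD,rT,31  */
}

int
main (void)
{
  for (int32_t x = -300; x <= 300; x++)
    for (int32_t c = -40; c <= 40; c++)
      assert (lt_signed_imm (x, c) == (uint32_t) (x < c));
  return 0;
}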

The two new expanders take an immediate_operand; that is more than
they can actually handle, but their only caller (cstore) will pass
them much less: only CONST_INTs that satisfy reg_or_short_operand.
cstore should handle more than just the short operands it does now,
but this patch doesn't touch that.
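
The unsigned comparisons use the same shape of sequence, with the
complement of the register operand as the second input of the and/or
(that is the "c" in [n]{and,or}[c]; combine folds the separate "not"
into andc/orc).  The same kind of stand-alone sketch, again only an
illustration with a hypothetical helper name:

#include <assert.h>
#include <stdint.h>

/* Illustration only: the unsigned "x <u c" identity behind
   cstore<mode>4_unsigned_imm, shown for 32 bits.  The expander tests
   the sign of the constant as a HOST_WIDE_INT; for 32 bits that is
   the top bit of c.  */
static uint32_t
ltu_imm (uint32_t x, uint32_t c)
{
  uint32_t diff = x - c;                      /* addi  rT,rX,-c  */
  uint32_t notx = ~x;                         /* not (folded)    */
  uint32_t bits = (c >> 31) ? (diff | notx)   /* orc             */
                            : (diff & notx);  /* andc            */
  return bits >> 31;                          /* srwi  rD,rT,31  */
}

int
main (void)
{
  static const uint32_t vals[] =
    { 0, 1, 2, 3, 100, 0x7ffffffeu, 0x7fffffffu,
      0x80000000u, 0x80000001u, 0xfffffffeu, 0xffffffffu };
  const unsigned n = sizeof vals / sizeof vals[0];
  for (unsigned i = 0; i < n; i++)
    for (unsigned j = 0; j < n; j++)
      assert (ltu_imm (vals[i], vals[j])
              == (uint32_t) (vals[i] < vals[j]));
  return 0;
}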

Bootstrapped and tested on powerpc64-linux with the usual set of flags
(-m32, -m32/-mpowerpc64, -m64, -m64/-mlra); no regressions.  This code
is exercised thousands of times in building the compiler itself, so no
testcases are added.

Okay for trunk?


Segher


2015-05-06  Segher Boessenkool  <seg...@kernel.crashing.org>

        * config/rs6000/rs6000.md (cstore<mode>4_signed_imm): New expander.
        (cstore<mode>4_unsigned_imm): New expander.
        (cstore<mode>4): Remove empty constraint strings.  Use the new
        expanders.

---
 gcc/config/rs6000/rs6000.md | 113 +++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 106 insertions(+), 7 deletions(-)

diff --git a/gcc/config/rs6000/rs6000.md b/gcc/config/rs6000/rs6000.md
index 0178bf4..58c76f9 100644
--- a/gcc/config/rs6000/rs6000.md
+++ b/gcc/config/rs6000/rs6000.md
@@ -11784,10 +11784,102 @@ (define_expand "cstore<mode>4_unsigned"
   DONE;
 })
 
+(define_expand "cstore<mode>4_signed_imm"
+  [(use (match_operator 1 "signed_comparison_operator"
+         [(match_operand:GPR 2 "gpc_reg_operand")
+          (match_operand:GPR 3 "immediate_operand")]))
+   (clobber (match_operand:GPR 0 "register_operand"))]
+  ""
+{
+  bool invert = false;
+
+  enum rtx_code cond_code = GET_CODE (operands[1]);
+
+  rtx op0 = operands[0];
+  rtx op1 = operands[2];
+  HOST_WIDE_INT val = INTVAL (operands[3]);
+
+  if (cond_code == GE || cond_code == GT)
+    {
+      cond_code = reverse_condition (cond_code);
+      invert = true;
+    }
+
+  if (cond_code == LE)
+    val++;
+
+  rtx tmp = gen_reg_rtx (<MODE>mode);
+  emit_insn (gen_add<mode>3 (tmp, op1, GEN_INT (-val)));
+  rtx x = gen_reg_rtx (<MODE>mode);
+  if (val < 0)
+    emit_insn (gen_and<mode>3 (x, op1, tmp));
+  else
+    emit_insn (gen_ior<mode>3 (x, op1, tmp));
+
+  if (invert)
+    {
+      rtx tmp = gen_reg_rtx (<MODE>mode);
+      emit_insn (gen_one_cmpl<mode>2 (tmp, x));
+      x = tmp;
+    }
+
+  int sh = GET_MODE_BITSIZE (<MODE>mode) - 1;
+  emit_insn (gen_lshr<mode>3 (op0, x, GEN_INT (sh)));
+
+  DONE;
+})
+
+(define_expand "cstore<mode>4_unsigned_imm"
+  [(use (match_operator 1 "unsigned_comparison_operator"
+         [(match_operand:GPR 2 "gpc_reg_operand")
+          (match_operand:GPR 3 "immediate_operand")]))
+   (clobber (match_operand:GPR 0 "register_operand"))]
+  ""
+{
+  bool invert = false;
+
+  enum rtx_code cond_code = GET_CODE (operands[1]);
+
+  rtx op0 = operands[0];
+  rtx op1 = operands[2];
+  HOST_WIDE_INT val = INTVAL (operands[3]);
+
+  if (cond_code == GEU || cond_code == GTU)
+    {
+      cond_code = reverse_condition (cond_code);
+      invert = true;
+    }
+
+  if (cond_code == LEU)
+    val++;
+
+  rtx tmp = gen_reg_rtx (<MODE>mode);
+  rtx tmp2 = gen_reg_rtx (<MODE>mode);
+  emit_insn (gen_add<mode>3 (tmp, op1, GEN_INT (-val)));
+  emit_insn (gen_one_cmpl<mode>2 (tmp2, op1));
+  rtx x = gen_reg_rtx (<MODE>mode);
+  if (val < 0)
+    emit_insn (gen_ior<mode>3 (x, tmp, tmp2));
+  else
+    emit_insn (gen_and<mode>3 (x, tmp, tmp2));
+
+  if (invert)
+    {
+      rtx tmp = gen_reg_rtx (<MODE>mode);
+      emit_insn (gen_one_cmpl<mode>2 (tmp, x));
+      x = tmp;
+    }
+
+  int sh = GET_MODE_BITSIZE (<MODE>mode) - 1;
+  emit_insn (gen_lshr<mode>3 (op0, x, GEN_INT (sh)));
+
+  DONE;
+})
+
 (define_expand "cstore<mode>4"
   [(use (match_operator 1 "rs6000_cbranch_operator"
-         [(match_operand:GPR 2 "gpc_reg_operand" "")
-          (match_operand:GPR 3 "reg_or_short_operand" "")]))
+         [(match_operand:GPR 2 "gpc_reg_operand")
+          (match_operand:GPR 3 "reg_or_short_operand")]))
    (clobber (match_operand:GPR 0 "register_operand"))]
   ""
 {
@@ -11816,11 +11908,18 @@ (define_expand "cstore<mode>4"
     emit_insn (gen_cstore<mode>4_unsigned (operands[0], operands[1],
                                           operands[2], operands[3]));
 
-  /* The generic code knows tricks to compute signed comparisons against
-     zero.  Let it do its thing.  */
-  else if (operands[3] == const0_rtx
-          && signed_comparison_operator (operands[1], VOIDmode))
-    FAIL;
+  /* For signed comparisons against a constant, we can do some simple
+     bit-twiddling.  */
+  else if (signed_comparison_operator (operands[1], VOIDmode)
+          && CONST_INT_P (operands[3]))
+    emit_insn (gen_cstore<mode>4_signed_imm (operands[0], operands[1],
+                                            operands[2], operands[3]));
+
+  /* And similarly for unsigned comparisons.  */
+  else if (unsigned_comparison_operator (operands[1], VOIDmode)
+          && CONST_INT_P (operands[3]))
+    emit_insn (gen_cstore<mode>4_unsigned_imm (operands[0], operands[1],
+                                              operands[2], operands[3]));
 
   /* Everything else, use the mfcr brute force.  */
   else
-- 
1.8.1.4
