Hi, evaluating avr-gcc 4.3.2 I observed some weaknesses concerning the code it generates. One example is this:
<pre> unsigned char extract_sfrbit_1() { unsigned char i = 0; if (REG & 64) i = 1; return i; } </pre> which is compiled (-Os) to <pre> extract_sfrbit_1: in r24,32-0x20 ; 6 *movqi/4 [length = 1] ldi r25,lo8(0) ; 26 *movqi/2 [length = 1] ldi r18,6 ; 31 *lshrhi3_const/5 [length = 5] 1: lsr r25 ror r24 dec r18 brne 1b andi r24,lo8(1) ; 14 andqi3/2 [length = 1] ret ; 29 return [length = 1] </pre> The middle end flattens control flow by replacing some conditionals with algebraic manipulations. However, jumps on AVR are not as expensive as on 32 bit machines while shifts are more expensive. But even in backends like avr it is easier to transform algebraic stuff into jumps than the other way round. Problems like this can be fixed by introducing some patterns for the combiner. I implemented some patterns which deal with bit manipulations. Overall, the code is faster and smaller, but some problems arise: 1) Would these patterns be helpful to improve gcc? 2) I never contributed to gcc, so I am unsure how to do that. 3) What about testing? I have no test environment. 4) The patch is just intended to be a snapshot. The "-mtest" is used for testing purposes and to see if code size reduces or not. 5) The patterns themselves work fine. However, avr.c:adjust_insn_length() makes many assumptions on patterns and what they look like or don't look like. This is very hard to maintain for complex patterns, and also it is error prone. Therefore, I did not (yet) implement the new stuff on the adjust_insn_length-level. Is there a more convenient, less error prone and more readable way to state insn lengths? Like, e.g. to assign negative insn length to indicate that an alternative's length needs adjustment? Please be kind, I am a newbie :-) Consider the attached patch just as a basis for a talk. The C source can be used to make gcc use most of the new patterns. The diff is against gcc_4_3_2_release. 
And again: This patch may lead to wrong code because insn lengths might be overwritten by adjust_insn_length() Georg-Johann ____________________________________________________________________ Psssst! Schon vom neuen WEB.DE MultiMessenger gehört? Der kann`s mit allen: http://www.produkte.web.de/messenger/?did=3123
Index: gcc/ChangeLog =================================================================== --- gcc/ChangeLog (revision 142396) +++ gcc/ChangeLog (working copy) @@ -3874,3 +3874,23 @@ * config/i386/sse.md (sse5_pperm, sse5_pperm_pack_v2di_v4si, sse5_pperm_pack_v4si_v8hi, sse5_pperm_pack_v8hi_v16qi, sse5_perm<mode>): Fix constraints. + +2008-12-08 Georg-Johann Lay <[EMAIL PROTECTED]> + * config/avr/avr.md (*sbrx_branch_shiftrt<MODE>, + *sbix_branch_shiftrt, + *extract_bithiqi, *extract_bithihi, *extract_bitqihi, + *extract_bit<MODE>qi, *insert_bit<MODE>qi, + *ashift_1_bit, *shiftrt_1_bit, + *insert_1_bit_ashift, *insert_1_bit0, + *iorqi2_ashift_bit7, *iorqi2_ashift_bit, + *movebit_ashift, *iorqi2_shiftrt_bit, + *movebit_shiftrt, *movebit_shiftrt_7, *movebit_ashift_7, + *ior<MODE>2_<MODE>bit0, *ior<MODE>2_zeroextract_<MODE>bit, + *zero_extendqihi2_bit): New, anonymous insns for better + support of single bit operations. + * config/avr/predicates.md (shiftrt_operator): New operator + predicate for LSHIFTRT and ASHIFTRT used in some of the above + insns. + + + Index: gcc/config/avr/predicates.md =================================================================== --- gcc/config/avr/predicates.md (revision 142396) +++ gcc/config/avr/predicates.md (working copy) @@ -105,3 +105,7 @@ (and (match_code "mem") (ior (match_test "register_operand (XEXP (op, 0), mode)") (match_test "CONSTANT_ADDRESS_P (XEXP (op, 0))")))) + +;; True for ASHIFTRT & LSHIFTRT +(define_predicate "shiftrt_operator" + (match_code "ashiftrt,lshiftrt")) Index: gcc/config/avr/constraints.md =================================================================== --- gcc/config/avr/constraints.md (revision 142396) +++ gcc/config/avr/constraints.md (working copy) @@ -107,3 +107,8 @@ "A memory address based on Y or Z pointer with displacement." (and (match_code "mem") (match_test "extra_constraint_Q (op)"))) + +(define_constraint "Y" + "Integer constant of 32 bit is a power of 2." 
+ (and (match_code "const_int") + (match_test "exact_log2(0xffffffff & ival) >= 0 && avr_test"))) Index: gcc/config/avr/avr.md =================================================================== --- gcc/config/avr/avr.md (revision 142396) +++ gcc/config/avr/avr.md (working copy) @@ -1,4 +1,3 @@ -;; -*- Mode: Scheme -*- ;; Machine description for GNU compiler, ;; for ATMEL AVR micro controllers. ;; Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008 @@ -57,6 +56,10 @@ (UNSPECV_PROLOGUE_SAVES 0) (UNSPECV_EPILOGUE_RESTORES 1)]) +;; Some mode iterators for scalar int modes used in bit insns. +(define_mode_iterator HISI [HI SI]) +(define_mode_iterator QIHISI [QI HI SI]) + (include "predicates.md") (include "constraints.md") @@ -1232,9 +1235,9 @@ (set_attr "cc" "clobber,set_n")]) (define_insn "iorsi3" - [(set (match_operand:SI 0 "register_operand" "=r,d") - (ior:SI (match_operand:SI 1 "register_operand" "%0,0") - (match_operand:SI 2 "nonmemory_operand" "r,i")))] + [(set (match_operand:SI 0 "register_operand" "=r,d,l") + (ior:SI (match_operand:SI 1 "register_operand" "%0,0,0") + (match_operand:SI 2 "nonmemory_operand" "r,i,Y")))] "" "*{ if (which_alternative==0) @@ -1242,6 +1245,18 @@ AS2 (or, %B0,%B2) CR_TAB AS2 (or, %C0,%C2) CR_TAB AS2 (or, %D0,%D2)); + + if (which_alternative==2 && avr_test) + { + HOST_WIDE_INT bit = exact_log2 (0xffffffff & INTVAL (operands[2])); + operands[2] = GEN_INT (bit); + output_asm_insn (\"set\", operands); + if (bit >= 24) return (AS2 (bld, %D0,%2-24)); + if (bit >= 16) return (AS2 (bld, %C0,%2-16)); + if (bit >= 8) return (AS2 (bld, %B0,%2-8)); + return (AS2 (bld, %A0,%2)); + } + if (GET_CODE (operands[2]) == CONST_INT) { HOST_WIDE_INT mask = INTVAL (operands[2]); @@ -1260,8 +1275,8 @@ AS2 (ori, %C0,hlo8(%2)) CR_TAB AS2 (ori, %D0,hhi8(%2))); }" - [(set_attr "length" "4,4") - (set_attr "cc" "set_n,clobber")]) + [(set_attr "length" "4,4,2") + (set_attr "cc" "set_n,clobber,none")]) (define_insn "*iorsi3_clobber" [(set 
(match_operand:SI 0 "register_operand" "=r,r") @@ -2061,6 +2076,30 @@ (const_int 4)))) (set_attr "cc" "clobber")]) +;; same as above +(define_insn "*sbrx_branch_shiftrt<MODE>" + [(set (pc) + (if_then_else + (match_operator 0 "eqne_operator" + [(zero_extract (match_operator:QIHISI 4 "shiftrt_operator" + [(match_operand:QIHISI 1 "register_operand" "r") + (match_operand 2 "const_int_operand" "n")]) + (const_int 1) + (const_int 0)) + (const_int 0)]) + (label_ref (match_operand 3 "" "")) + (pc)))] + "(avr_test & 512)" + "* return avr_out_sbxx_branch (insn, operands);" + [(set (attr "length") + (if_then_else (and (ge (minus (pc) (match_dup 3)) (const_int -2046)) + (le (minus (pc) (match_dup 3)) (const_int 2046))) + (const_int 2) + (if_then_else (eq_attr "mcu_mega" "no") + (const_int 2) + (const_int 4)))) + (set_attr "cc" "clobber")]) + (define_insn "*sbrx_and_branchhi" [(set (pc) (if_then_else @@ -2516,6 +2555,30 @@ (const_int 4)))) (set_attr "cc" "clobber")]) +;; same as above, needed because rtx was not canonicalised +(define_insn "*sbix_branch_shiftrt" + [(set (pc) + (if_then_else + (match_operator 0 "eqne_operator" + [(zero_extract (match_operator:QI 4 "shiftrt_operator" + [(mem:QI (match_operand 1 "low_io_address_operand" "n")) + (match_operand 2 "const_int_operand" "n")]) + (const_int 1) + (const_int 0)) + (const_int 0)]) + (label_ref (match_operand 3 "" "")) + (pc)))] + "(optimize > 0) && (avr_test & 512)" + "* return avr_out_sbxx_branch (insn, operands);" + [(set (attr "length") + (if_then_else (and (ge (minus (pc) (match_dup 3)) (const_int -2046)) + (le (minus (pc) (match_dup 3)) (const_int 2046))) + (const_int 2) + (if_then_else (eq_attr "mcu_mega" "no") + (const_int 2) + (const_int 4)))) + (set_attr "cc" "clobber")]) + ;; Tests of bit 7 are pessimized to sign tests, so we need this too... 
(define_insn "*sbix_branch_bit7" [(set (pc) @@ -2840,3 +2903,349 @@ expand_epilogue (); DONE; }") + +;; :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 :1 +;; Move and extract single bits. +;; The patterns are generated by combine, almost all use BST/BLD to move +;; bits around. + +;; Extract one bit from a complex expression. +;; Note that introducing (zero_extract:QI) would reduce performance. +;; [0] = [1].[2] +(define_insn "*extract_bithiqi" + [(set (match_operand:QI 0 "register_operand" "=r") + (subreg:QI (zero_extract:HI (match_operand:QI 1 "register_operand" "r") + (const_int 1) + (match_operand:QI 2 "const_int_operand" "n")) 0))] + "(avr_test & 2) && INTVAL(operands[2]) > 1" + "bst %1,%2\;clr %0\;bld %0,0" + [(set_attr "length" "3") + (set_attr "cc" "clobber")]) + +;; FIXME: no performance gain --> removed +(define_insn "*extract_bitqi" + [(set (match_operand:QI 0 "register_operand" "=r") + (zero_extract:QI (match_operand:QI 1 "register_operand" "r") + (const_int 1) + (match_operand:QI 2 "const_int_operand" "n")))] + "(avr_test & 1) && 0 && INTVAL(operands[2]) > 1" + "bst %1,%2\;clr %0\;bld %0,0" + [(set_attr "length" "3") + (set_attr "cc" "clobber")]) + +;; [0] = [1].[2] +(define_insn "*extract_bithihi" + [(set (match_operand:HI 0 "register_operand" "=r") + (and:HI (match_operator:HI 3 "shiftrt_operator" + [(match_operand:HI 1 "register_operand" "r") + (match_operand:HI 2 "const_int_operand" "n")]) + (const_int 1)))] + "(avr_test & 2) && (INTVAL (operands[2]) & 7) > 1" + { + return (INTVAL (operands[2]) >= 8) + ? 
"bst %B1,%2-8\;clr %B0\;clr %A0\;bld %A0,0" + : "bst %A1,%2 \;clr %B0\;clr %A0\;bld %A0,0"; + } + [(set_attr "length" "4") + (set_attr "cc" "clobber")]) + +;; [0] = [1].[2] +(define_insn "*extract_bitqihi" + [(set (match_operand:HI 0 "register_operand" "=r") + (zero_extract:HI (match_operand:QI 1 "register_operand" "r") + (const_int 1) + (match_operand:QI 2 "const_int_operand" "n")))] + "(avr_test & 2) && INTVAL(operands[2]) > 1" + "bst %1,%2\;clr %B0\;clr %A0\;bld %A0,0" + [(set_attr "length" "4") + (set_attr "cc" "clobber")]) + +;; [0] = [2].[3] +(define_insn "*extract_bit<MODE>qi" + [(set (match_operand:QI 0 "register_operand" "=r") + (and:QI (subreg:QI (match_operator:HISI 1 "shiftrt_operator" + [(match_operand:HISI 2 "register_operand" "r") + (match_operand:HISI 3 "const_int_operand" "n")]) 0) + (const_int 1)))] + "(avr_test & 2) && (<MODE>mode == SImode || (INTVAL(operands[3]) & 7) > 1)" + { + HOST_WIDE_INT bit = INTVAL (operands[3]); + if (bit >= 24) return "bst %D2,%3-24\;clr %A0\;bld %A0,0"; + if (bit >= 16) return "bst %C2,%3-16\;clr %A0\;bld %A0,0"; + if (bit >= 8) return "bst %B2,%3-8 \;clr %A0\;bld %A0,0"; + return "bst %A2,%3 \;clr %A0\;bld %A0,0"; + } + [(set_attr "length" "3") + (set_attr "cc" "clobber")]) + +;; [0] = 0 +;; [0].[4] = [2].[3] +(define_insn "*insert_bit<MODE>qi" + [(set (match_operand:QI 0 "register_operand" "=r") + (and:QI (ashift:QI (subreg:QI (match_operator:HISI 1 "shiftrt_operator" + [(match_operand:HISI 2 "register_operand" "r") + (match_operand:HISI 3 "const_int_operand" "n")]) 0) + (match_operand:QI 4 "const_int_operand" "n")) + (match_operand:QI 5 "single_one_operand" "n")))] ; = (1 << [4]) + "(avr_test & 4) && INTVAL(operands[4]) == exact_log2(0xff & INTVAL(operands[5]))" + { + HOST_WIDE_INT from_bit = INTVAL(operands[3]); + + if (from_bit >= 24) return "bst %D2,%3-24\;clr %0\;bld %0,%4"; + if (from_bit >= 16) return "bst %C2,%3-16\;clr %0\;bld %0,%4"; + if (from_bit >= 8) return "bst %B2,%3-8 \;clr %0\;bld %0,%4"; + return 
"bst %A2,%3 \;clr %0\;bld %0,%4"; + } + [(set_attr "length" "3") + (set_attr "cc" "clobber")]) + +;; [0] = 0 +;; [0].log[3] = [1].log[3]-[2] +(define_insn "*ashift_1_bit" + [(set (match_operand:QI 0 "register_operand" "=r") + (and:QI (ashift:QI (match_operand:QI 1 "register_operand" "r") + (match_operand:QI 2 "const_int_operand" "n")) + (match_operand:QI 3 "single_one_operand" "n")))] + "(avr_test & 4) && INTVAL(operands[2]) > 1" + { + HOST_WIDE_INT to_bit = exact_log2 (0xff & INTVAL(operands[3])); + HOST_WIDE_INT from_bit = to_bit - INTVAL(operands[2]); + operands[2] = GEN_INT(from_bit); + operands[3] = GEN_INT(to_bit); + return "bst %1,%2\;clr %0\;bld %0,%3"; + } + [(set_attr "length" "3") + (set_attr "cc" "clobber")]) + +;; [0] = 0 +;; [0].log[3] = [1].log[3]+[2] +(define_insn "*shiftrt_1_bit" + [(set (match_operand:QI 0 "register_operand" "=r") + (and:QI (match_operator:QI 4 "shiftrt_operator" + [(match_operand:QI 1 "register_operand" "r") + (match_operand:QI 2 "const_int_operand" "n")]) + (match_operand:QI 3 "single_one_operand" "n")))] + "(avr_test & 4) && INTVAL(operands[2]) > 1" + { + HOST_WIDE_INT to_bit = exact_log2 (0xff & INTVAL(operands[3])); + HOST_WIDE_INT from_bit = to_bit + INTVAL(operands[2]); + operands[2] = GEN_INT(from_bit); + operands[3] = GEN_INT(to_bit); + return "bst %1,%2\;clr %0\;bld %0,%3"; + } + [(set_attr "length" "3") + (set_attr "cc" "clobber")]) + +;; [0].log[5] = [3].log[5]-[4] +(define_insn "*insert_1_bit_ashift" + [(set (match_operand:QI 0 "register_operand" "=r") + (ior:QI (and:QI (match_operand:QI 1 "register_operand" "0") + (match_operand:QI 2 "single_zero_operand" "n")) ; = ~[5] + (and:QI (ashift:QI (match_operand:QI 3 "register_operand" "r") + (match_operand:QI 4 "const_int_operand" "n")) + (match_operand:QI 5 "single_one_operand" "n"))))] + + "(avr_test & 8) + && 0xff == (0xff & (INTVAL(operands[5]) ^ INTVAL(operands[2])))" + { + HOST_WIDE_INT to_bit = exact_log2(0xff & INTVAL(operands[5])); + HOST_WIDE_INT from_bit = 
to_bit - INTVAL(operands[4]); + operands[5] = GEN_INT (to_bit); + operands[4] = GEN_INT (from_bit); + return "bst %3,%4\;bld %0,%5"; + } + [(set_attr "length" "2") + (set_attr "cc" "none")]) + +;; [0].log[4] = [3].log[4] +(define_insn "*insert_1_bit0" + [(set (match_operand:QI 0 "register_operand" "=r") + (ior:QI (and:QI (match_operand:QI 1 "register_operand" "0") + (match_operand:QI 2 "single_zero_operand" "n")) ; = ~[4] + (and:QI (match_operand:QI 3 "register_operand" "r") + (match_operand:QI 4 "single_one_operand" "n"))))] + "(avr_test & 8) + && 0xff == (0xff & (INTVAL(operands[4]) ^ INTVAL(operands[2])))" + { + operands[4] = GEN_INT(exact_log2(0xff & INTVAL(operands[4]))); + return "bst %3,%4\;bld %0,%4"; + } + [(set_attr "length" "2") + (set_attr "cc" "none")]) + +;; special case of pattern below +;; [0].7 |= [1].0 +(define_insn "*iorqi2_ashift_bit7" + [(set (match_operand:QI 0 "register_operand" "=d,r") + (ior:QI (ashift:QI (match_operand:QI 1 "register_operand" "r,r") + (const_int 7)) + (match_operand:QI 2 "register_operand" "0,0")))] + "(avr_test & 16)" + "@ + sbrc %1,0\;ori %0,0x80 + bst %1,0\;sbrs %0,7\;bld %0,7" + [(set_attr "length" "2,3") + (set_attr "cc" "clobber,none")]) + +;; [0].log[3] |= [1].log[3]-[2] +(define_insn "*iorqi2_ashift_bit" + [(set (match_operand:QI 0 "register_operand" "=d,r") + (ior:QI (and:QI (ashift:QI (match_operand:QI 1 "register_operand" "r,r") + (match_operand:QI 2 "const_int_operand" "n,n")) + (match_operand:QI 3 "single_one_operand" "n,n")) + (match_operand:QI 4 "register_operand" "0,0")))] + "(avr_test & 16)" + { + HOST_WIDE_INT to_bit = exact_log2 (0xff & INTVAL(operands[3])); + HOST_WIDE_INT from_bit = to_bit - INTVAL(operands[2]); + + operands[2] = GEN_INT (from_bit); + operands[3] = GEN_INT (to_bit); + + if (0 == which_alternative) + return "sbrc %1,%2\;ori %0,(1<<%3)"; + + return "bst %1,%2\;sbrs %0,%3\;bld %0,%3"; + } + [(set_attr "length" "2,3") + (set_attr "cc" "clobber,none")]) + +;; needs the above pattern as 
combiner bridge +;; [0].log[3] = [1].log[3]-[2] +(define_insn "*movebit_ashift" + [(set (match_operand:QI 0 "register_operand" "=r") + (ior:QI (and:QI (ashift:QI (match_operand:QI 1 "register_operand" "r") + (match_operand:QI 2 "const_int_operand" "n")) + (match_operand:QI 3 "single_one_operand" "n")) + (and:QI (match_operand:QI 4 "register_operand" "0") + (match_operand:QI 5 "single_zero_operand" "n"))))] ; = ~[3] + "(avr_test & 8) + && 0xff == (0xff & (INTVAL(operands[3]) ^ INTVAL(operands[5])))" + { + HOST_WIDE_INT to_bit = exact_log2 (0xff & INTVAL(operands[3])); + HOST_WIDE_INT from_bit = to_bit - INTVAL(operands[2]); + + operands[2] = GEN_INT (from_bit); + operands[3] = GEN_INT (to_bit); + return "bst %1,%2\;bld %0,%3"; + } + [(set_attr "length" "2") + (set_attr "cc" "none")]) + +;; [0].log[4] |= [2].log[4]+[3] +(define_insn "*iorqi2_shiftrt_bit" + [(set (match_operand:QI 0 "register_operand" "=d,r") + (ior:QI (and:QI (match_operator:QI 1 "shiftrt_operator" + [(match_operand:QI 2 "register_operand" "r,r") + (match_operand:QI 3 "const_int_operand" "n,n")]) + (match_operand:QI 4 "single_one_operand" "n,n")) + (match_operand:QI 5 "register_operand" "0,0")))] + "(avr_test & 16)" + { + HOST_WIDE_INT to_bit = exact_log2 (0xff & INTVAL(operands[4])); + HOST_WIDE_INT from_bit = to_bit + INTVAL(operands[3]); + + operands[3] = GEN_INT (from_bit); + operands[4] = GEN_INT (to_bit); + + if (0 == which_alternative) + return "sbrc %2,%3\;ori %0,(1<<%4)"; + + return "bst %2,%3\;sbrs %0,%4\;bld %0,%4"; + } + [(set_attr "length" "2,3") + (set_attr "cc" "clobber,none")]) + +;; needs the above pattern as combiner bridge +;; [0].log[4] = [2].log[4]+[3] +(define_insn "*movebit_shiftrt" + [(set (match_operand:QI 0 "register_operand" "=r") + (ior:QI (and:QI (match_operator:QI 1 "shiftrt_operator" + [(match_operand:QI 2 "register_operand" "r") + (match_operand:QI 3 "const_int_operand" "n")]) + (match_operand:QI 4 "single_one_operand" "n")) + (and:QI (match_operand:QI 5 
"register_operand" "0") + (match_operand:QI 6 "single_zero_operand" "n"))))] ; = ~[4] + "(avr_test & 8) + && 0xff == (0xff & (INTVAL(operands[4]) ^ INTVAL(operands[6])))" + { + HOST_WIDE_INT to_bit = exact_log2 (0xff & INTVAL(operands[4])); + HOST_WIDE_INT from_bit = to_bit + INTVAL(operands[3]); + + operands[3] = GEN_INT (from_bit); + operands[4] = GEN_INT (to_bit); + return "bst %2,%3\;bld %0,%4"; + } + [(set_attr "length" "2") + (set_attr "cc" "none")]) + +;; same as above, for .0 <- .7 +;; [0].0 = [2].7 +(define_insn "*movebit_shiftrt_7" + [(set (match_operand:QI 0 "register_operand" "=r") + (ior:QI (and:QI (match_operand:QI 1 "register_operand" "0") + (match_operand:QI 2 "single_zero_operand" "n")) ; ~1 + (match_operator:QI 3 "shiftrt_operator" + [(match_operand:QI 4 "register_operand" "r") + (const_int 7)])))] + "(avr_test & 8) && 0xfe == (0xff & INTVAL(operands[2]))" + "bst %4,7\;bld %0,0" + [(set_attr "length" "2") + (set_attr "cc" "none")]) + +;; same as above, for .7 <- .0 +;; [0].7 = [3].0 +(define_insn "*movebit_ashift_7" + [(set (match_operand:QI 0 "register_operand" "=r") + (ior:QI (and:QI (match_operand:QI 1 "register_operand" "0") + (match_operand:QI 2 "single_zero_operand" "n")) ; 0x7f + (ashift:QI (match_operand:QI 3 "register_operand" "r") + (const_int 7))))] + "(avr_test & 8) && 0x7f == (0xff & INTVAL(operands[2]))" + "bst %3,0\;bld %0,7" + [(set_attr "length" "2") + (set_attr "cc" "none")]) + +;;[0] |= [1].0 +(define_insn "*ior<MODE>2_<MODE>bit0" + [(set (match_operand:QIHISI 0 "register_operand" "=d,r") + (ior:QIHISI (and:QIHISI (match_operand:QIHISI 1 "register_operand" "r,r") + (const_int 1)) + (match_operand:QIHISI 2 "register_operand" "0,0")))] + "(avr_test & 16)" + "@ + sbrc %A1,0\;ori %A0,1 + bst %A1,0\;sbrs %A0,0\;bld %A0,0" + [(set_attr "length" "2,3") + (set_attr "cc" "clobber,none")]) + +;; Same as above for bit [2] +;; [0] |= [1].[2] +;; FIXME: [0] and [1] need not to have the same mode. 
+;; Do mode iterators allow cross products by introducing +;; more than one iterator per insn? +(define_insn "*ior<MODE>2_zeroextract_<MODE>bit" + [(set (match_operand:QIHISI 0 "register_operand" "=d,r") + (ior:QIHISI (zero_extract:QIHISI (match_operand:QIHISI 1 "register_operand" "r,r") + (const_int 1) + (match_operand:QI 2 "const_int_operand" "n,n")) + (match_operand:QIHISI 3 "register_operand" "0,0")))] + "(avr_test & 16)" + "@ + sbrc %A1,%2\;ori %A0,1 + bst %A1,%2\;sbrs %A0,0\;bld %A0,0" + [(set_attr "length" "2,3") + (set_attr "cc" "clobber,none")]) + +;; [0] = [1].[2] +(define_insn "*zero_extendqihi2_bit" + [(set (match_operand:HI 0 "register_operand" "=d,*r") + (zero_extend:HI (zero_extract:QI (match_operand:QI 1 "register_operand" "0,r") + (const_int 1) + (match_operand:QI 2 "const_int_operand" "P,n"))))] + "(avr_test & 2) && INTVAL (operands[2]) > 0" + "@ + lsr %A0\;andi %A0,1\;clr %B0 + bst %A1,%2\;clr %A0\;bld %A0,0\;clr %B0" + [(set_attr "length" "3,4") + (set_attr "cc" "clobber,clobber")]) Index: gcc/config/avr/avr.c =================================================================== --- gcc/config/avr/avr.c (revision 142396) +++ gcc/config/avr/avr.c (working copy) @@ -4260,10 +4260,16 @@ adjust_insn_length (rtx insn, int len) { HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1)); if (GET_MODE (op[1]) == SImode) + { + if (avr_test && test_hard_reg_class (NO_LD_REGS, op[0]) + && exact_log2 (mask) >= 0) + len = 2; + else len = (((mask & 0xff) != 0) + ((mask & 0xff00) != 0) + ((mask & 0xff0000L) != 0) + ((mask & 0xff000000L) != 0)); + } else if (GET_MODE (op[1]) == HImode) len = (((mask & 0xff) != 0) + ((mask & 0xff00) != 0)); Index: gcc/config/avr/avr.opt =================================================================== --- gcc/config/avr/avr.opt (revision 142396) +++ gcc/config/avr/avr.opt (working copy) @@ -66,3 +66,6 @@ Relax branches mpmem-wrap-around Target Report Make the linker relaxation machine assume that a program counter wrap-around occures. 
+ +mtest= +Target RejectNegative Report Joined Undocumented UInteger Var(avr_test) Init(0)
#define CLOBBER_H \ asm volatile ("; clobber \"d\"" ::: \ "16", "17", "18", "19", \ "20", "21", "22", "23", "24", "25", \ "26", "27", "28", "29", "30", "31") #define RELOAD(REGCLASS,VAR) \ asm volatile ("; RELOAD " #VAR " to \"" REGCLASS "\"" \ : "+" REGCLASS (VAR)) #define REG (*(((unsigned char volatile *) 0x20))) typedef union { struct { unsigned b0:1; unsigned b1:1; unsigned b2:1; unsigned b3:1; unsigned b4:1; unsigned b5:1; unsigned b6:1; unsigned b7:1; }; struct { unsigned n0:4; unsigned n1:4; }; unsigned char c; } u_t; u_t s, t; unsigned char i; unsigned short g, h; char z,y; /**************************************/ long or1 (long i) { RELOAD ("l", i); i |= (1l << 20); RELOAD ("l", i); return i; } #if 1 unsigned char extract_sfrbit_1() { unsigned char i = 0; if (REG & 64) i = 1; return i; } void insert_1_bitashift (unsigned char b) { s.b6 = t.b6; } unsigned char shiftrt_and1 (unsigned char c) { i = (c >> 4) & 1; return c; } unsigned char shiftrt_1_bit (unsigned char c) { i = (c >> 4) & 2; return c; } unsigned char ashift_1_bit (unsigned char c) { i = (c << 5) & 64; return c; } void extract_sfrbit_2() { unsigned char j = 0; if (REG & 64) j = 1; i = j; } int extract_gprbit (int i) { return (i & (1 << 14)) ? 1 : 0; } void bar() { if (REG & 64) i = 0; s.b6 = i; if (i & 2) REG |= 64; } void movebit_ashift() { s.b6 = s.b5; s.b7 = s.b3; } void movebit_shiftrt() { s.b3 = s.b7; s.b5 = s.b6; } void movebit_07() { s.b0 = s.b7; } void movebit_71() { s.b7 = s.b1; } void movebit_70() { s.b7 = s.b0; } unsigned char revert (unsigned char n) { u_t u = {.c = n}; u_t v; v.b0 = u.b7; v.b1 = u.b6; v.b2 = u.b5; v.b3 = u.b4; v.b4 = u.b3; v.b5 = u.b2; v.b6 = u.b1; v.b7 = u.b0; return v.c; } void test_sfrbit () { if ((REG >> 6) & 1) y = 1; } void test_gprbit (unsigned char reg) { if (1 & (reg >> 6)) y = 1; } void get_longbit (long x) { i = (x >> 25) & 1; s.b3 = (x & (1ul << 14)) ? 1 : 0; s.b0 = (x & (1ul << 24)) ? 
1 : 0; } unsigned char copy (unsigned char n) { u_t u = {.c = n}; u_t v = {.c = i}; v.b0 = u.b0; v.b4 = u.b4; v.b7 = u.b7; return v.c; } unsigned char is_ne0 (unsigned char n) { u_t u = {.c = n}; return u.b0 | u.b1 | u.b2 | u.b3 | u.b4 | u.b5 | u.b6 | u.b7; } #endif
_______________________________________________ AVR-GCC-list mailing list AVR-GCC-list@nongnu.org http://lists.nongnu.org/mailman/listinfo/avr-gcc-list