Move the rules for CBZ/CBNZ and TBZ/TBNZ above the rules for
CBB<cond>/CBH<cond>/CB<cond>.  Insn patterns that appear earlier in the
machine description are matched first, so this gives CBZ/CBNZ and
TBZ/TBNZ higher priority.  That is what we want, because they can
express larger branch displacements (+/-1MiB and +/-32KiB respectively,
versus +/-1KiB for the CMPBR forms).
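For example (a minimal sketch, not the exact cmpbr.c test, whose
functions are generated by the COMPARE_ALL macros; the function name
below is hypothetical), consider an equality test against zero:

    void taken (void);
    void not_taken (void);

    void
    u32_eq_0 (unsigned int x)
    {
      if (x == 0)
        taken ();
      else
        not_taken ();
    }

With the cmpbr extension enabled, this used to be matched by the CMPBR
CB<cond> rule and emitted as

    cbne    w0, wzr, .L<n>    // +/-1KiB displacement

whereas after the reordering it is matched first by the CBZ/CBNZ rule:

    cbnz    w0, .L<n>         // +/-1MiB displacement

as the updated scan-assembler patterns in cmpbr.c show.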
gcc/ChangeLog:
* config/aarch64/aarch64.md (aarch64_cbz<optab><mode>1): Move
above rules for CBB<cond>/CBH<cond>/CB<cond>.
(*aarch64_tbz<optab><mode>1): Likewise.
gcc/testsuite/ChangeLog:
* gcc.target/aarch64/cmpbr.c: Update tests.
---
gcc/config/aarch64/aarch64.md | 170 ++++++++++++-----------
gcc/testsuite/gcc.target/aarch64/cmpbr.c | 35 ++---
2 files changed, 110 insertions(+), 95 deletions(-)
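Note on the ranges (derived from the BRANCH_LEN_* constants in the
hunk below; all values are rounded to the 4-byte instruction granule):

  B, BL               imm26 * 4  ->  -134217728 .. +134217724  (+/-128MiB)
  B.<cond>, CBZ/CBNZ  imm19 * 4  ->    -1048576 .. +1048572    (+/-1MiB)
  TBZ/TBNZ            imm14 * 4  ->      -32768 .. +32764      (+/-32KiB)
  CBB/CBH/CB<cond>    imm9 * 4   ->       -1024 .. +1020       (+/-1KiB)

So when both a CBZ/CBNZ (or TBZ/TBNZ) rule and a CMPBR rule can match,
the former should win, which is what the reordering achieves.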
diff --git a/gcc/config/aarch64/aarch64.md b/gcc/config/aarch64/aarch64.md
index 641c3653a40..aa528cd13b4 100644
--- a/gcc/config/aarch64/aarch64.md
+++ b/gcc/config/aarch64/aarch64.md
@@ -697,27 +697,38 @@ (define_insn "jump"
;; Maximum PC-relative positive/negative displacements for various branching
;; instructions.
(define_constants
[
;; +/- 128MiB. Used by B, BL.
(BRANCH_LEN_P_128MiB 134217724)
(BRANCH_LEN_N_128MiB -134217728)
;; +/- 1MiB. Used by B.<cond>, CBZ, CBNZ.
(BRANCH_LEN_P_1MiB 1048572)
(BRANCH_LEN_N_1MiB -1048576)
;; +/- 32KiB. Used by TBZ, TBNZ.
(BRANCH_LEN_P_32KiB 32764)
(BRANCH_LEN_N_32KiB -32768)
;; +/- 1KiB. Used by CBB<cond>, CBH<cond>, CB<cond>.
(BRANCH_LEN_P_1Kib 1020)
(BRANCH_LEN_N_1Kib -1024)
]
)
;; -------------------------------------------------------------------
;; Conditional jumps
+;; The order of the rules below is important.
+;; Higher priority rules are preferred because they can express larger
+;; displacements.
+;; 1) EQ/NE comparisons against zero are handled by CBZ/CBNZ.
+;; 2) LT/GE comparisons against zero are handled by TBZ/TBNZ.
+;; 3) When the CMPBR extension is enabled:
+;; a) Comparisons between two registers are handled by
+;; CBB<cond>/CBH<cond>/CB<cond>.
+;; b) Comparisons between a GP register and an immediate in the range 0-63 are
+;; handled by CB<cond>.
+;; 4) Otherwise, emit a CMP+B<cond> sequence.
;; -------------------------------------------------------------------
(define_expand "cbranch<GPI:mode>4"
@@ -770,63 +781,140 @@ (define_expand "cbranch<mode>4"
(define_expand "cbranchcc4"
[(set (pc) (if_then_else (match_operator 0 "aarch64_comparison_operator"
[(match_operand 1 "cc_register")
(match_operand 2 "const0_operand")])
(label_ref (match_operand 3))
(pc)))]
""
""
)
+;; For an EQ/NE comparison against zero, emit `CBZ`/`CBNZ`
+(define_insn "aarch64_cbz<optab><mode>1"
+ [(set (pc) (if_then_else (EQL (match_operand:GPI 0 "register_operand" "r")
+ (const_int 0))
+ (label_ref (match_operand 1))
+ (pc)))]
+ "!aarch64_track_speculation"
+ {
+ if (get_attr_length (insn) == 8)
+ return aarch64_gen_far_branch (operands, 1, "Lcb", "<inv_cb>\\t%<w>0, ");
+ else
+ return "<cbz>\\t%<w>0, %l1";
+ }
+ [(set_attr "type" "branch")
+ (set (attr "length")
+ (if_then_else (and (ge (minus (match_dup 1) (pc))
+ (const_int BRANCH_LEN_N_1MiB))
+ (lt (minus (match_dup 1) (pc))
+ (const_int BRANCH_LEN_P_1MiB)))
+ (const_int 4)
+ (const_int 8)))
+ (set (attr "far_branch")
+ (if_then_else (and (ge (minus (match_dup 2) (pc))
+ (const_int BRANCH_LEN_N_1MiB))
+ (lt (minus (match_dup 2) (pc))
+ (const_int BRANCH_LEN_P_1MiB)))
+ (const_string "no")
+ (const_string "yes")))]
+)
+
+;; For an LT/GE comparison against zero, emit `TBZ`/`TBNZ`
+(define_insn "*aarch64_tbz<optab><mode>1"
+ [(set (pc) (if_then_else (LTGE (match_operand:ALLI 0 "register_operand" "r")
+ (const_int 0))
+ (label_ref (match_operand 1))
+ (pc)))
+ (clobber (reg:CC CC_REGNUM))]
+ "!aarch64_track_speculation"
+ {
+ if (get_attr_length (insn) == 8)
+ {
+ if (get_attr_far_branch (insn) == FAR_BRANCH_YES)
+ return aarch64_gen_far_branch (operands, 1, "Ltb",
+ "<inv_tb>\\t%<w>0, <sizem1>, ");
+ else
+ {
+ char buf[64];
+ uint64_t val = ((uint64_t) 1)
+ << (GET_MODE_SIZE (<MODE>mode) * BITS_PER_UNIT - 1);
+ sprintf (buf, "tst\t%%<w>0, %" PRId64, val);
+ output_asm_insn (buf, operands);
+ return "<bcond>\t%l1";
+ }
+ }
+ else
+ return "<tbz>\t%<w>0, <sizem1>, %l1";
+ }
+ [(set_attr "type" "branch")
+ (set (attr "length")
+ (if_then_else (and (ge (minus (match_dup 1) (pc))
+ (const_int BRANCH_LEN_N_32KiB))
+ (lt (minus (match_dup 1) (pc))
+ (const_int BRANCH_LEN_P_32KiB)))
+ (const_int 4)
+ (const_int 8)))
+ (set (attr "far_branch")
+ (if_then_else (and (ge (minus (match_dup 1) (pc))
+ (const_int BRANCH_LEN_N_1MiB))
+ (lt (minus (match_dup 1) (pc))
+ (const_int BRANCH_LEN_P_1MiB)))
+ (const_string "no")
+ (const_string "yes")))]
+)
+
;; Emit a `CB<cond> (register)` or `CB<cond> (immediate)` instruction.
+;; Only immediates in the range 0-63 are supported.
+;; Comparisons against immediates outside this range fall back to
+;; CMP + B<cond>.
(define_insn "aarch64_cb<GPI:mode>"
[(set (pc) (if_then_else (match_operator 0 "aarch64_comparison_operator"
- [(match_operand:GPI 1 "register_operand")
- (match_operand:GPI 2 "aarch64_cb_operand")])
+ [(match_operand:GPI 1 "register_operand" "r")
+ (match_operand:GPI 2 "aarch64_cb_operand" "ri")])
(label_ref (match_operand 3))
(pc)))]
"TARGET_CMPBR"
"cb%m0\\t%<w>1, %<w>2, %l3";
[(set_attr "type" "branch")
(set (attr "length")
(if_then_else (and (ge (minus (match_dup 3) (pc))
(const_int BRANCH_LEN_N_1Kib))
(lt (minus (match_dup 3) (pc))
(const_int BRANCH_LEN_P_1Kib)))
(const_int 4)
(const_int 8)))
(set (attr "far_branch")
(if_then_else (and (ge (minus (match_dup 3) (pc))
(const_int BRANCH_LEN_N_1Kib))
(lt (minus (match_dup 3) (pc))
(const_int BRANCH_LEN_P_1Kib)))
(const_string "no")
(const_string "yes")))]
)
;; Emit a `CBB<cond> (register)` or `CBH<cond> (register)` instruction.
(define_insn "aarch64_cb<SHORT:mode>"
[(set (pc) (if_then_else (match_operator 0 "aarch64_comparison_operator"
- [(match_operand:SHORT 1 "register_operand")
- (match_operand:SHORT 2 "aarch64_cb_short_operand")])
+ [(match_operand:SHORT 1 "register_operand" "r")
+ (match_operand:SHORT 2 "aarch64_cb_short_operand" "rZ")])
(label_ref (match_operand 3))
(pc)))]
"TARGET_CMPBR"
"cb<SHORT:cmpbr_suffix>%m0\\t%<w>1, %<w>2, %l3";
[(set_attr "type" "branch")
(set (attr "length")
(if_then_else (and (ge (minus (match_dup 3) (pc))
(const_int BRANCH_LEN_N_1Kib))
(lt (minus (match_dup 3) (pc))
(const_int BRANCH_LEN_P_1Kib)))
(const_int 4)
(const_int 8)))
(set (attr "far_branch")
(if_then_else (and (ge (minus (match_dup 3) (pc))
(const_int BRANCH_LEN_N_1Kib))
(lt (minus (match_dup 3) (pc))
(const_int BRANCH_LEN_P_1Kib)))
(const_string "no")
(const_string "yes")))]
)
;; Emit `B<cond>`, assuming that the condition is already in the CC register.
@@ -867,112 +955,38 @@ (define_insn "aarch64_bcond"
;; For a 24-bit immediate CST we can optimize the compare for equality
;; and branch sequence from:
;; mov x0, #imm1
;; movk x0, #imm2, lsl 16 /* x0 contains CST. */
;; cmp x1, x0
;; b<ne,eq> .Label
;; into the shorter:
;; sub x0, x1, #(CST & 0xfff000)
;; subs x0, x0, #(CST & 0x000fff)
;; b<ne,eq> .Label
(define_insn_and_split "*aarch64_bcond_wide_imm<GPI:mode>"
[(set (pc) (if_then_else (EQL (match_operand:GPI 0 "register_operand" "r")
(match_operand:GPI 1 "aarch64_imm24" "n"))
(label_ref:P (match_operand 2))
(pc)))]
"!aarch64_move_imm (INTVAL (operands[1]), <GPI:MODE>mode)
&& !aarch64_plus_operand (operands[1], <GPI:MODE>mode)
&& !reload_completed"
"#"
"&& true"
[(const_int 0)]
{
HOST_WIDE_INT lo_imm = UINTVAL (operands[1]) & 0xfff;
HOST_WIDE_INT hi_imm = UINTVAL (operands[1]) & 0xfff000;
rtx tmp = gen_reg_rtx (<GPI:MODE>mode);
emit_insn (gen_add<GPI:mode>3 (tmp, operands[0], GEN_INT (-hi_imm)));
emit_insn (gen_add<GPI:mode>3_compare0 (tmp, tmp, GEN_INT (-lo_imm)));
rtx cc_reg = gen_rtx_REG (CC_NZmode, CC_REGNUM);
rtx cmp_rtx = gen_rtx_fmt_ee (<EQL:CMP>, <GPI:MODE>mode,
cc_reg, const0_rtx);
emit_jump_insn (gen_aarch64_bcond (cmp_rtx, cc_reg, operands[2]));
DONE;
}
)
-;; For an EQ/NE comparison against zero, emit `CBZ`/`CBNZ`
-(define_insn "aarch64_cbz<optab><mode>1"
- [(set (pc) (if_then_else (EQL (match_operand:GPI 0 "register_operand" "r")
- (const_int 0))
- (label_ref (match_operand 1))
- (pc)))]
- "!aarch64_track_speculation"
- {
- if (get_attr_length (insn) == 8)
- return aarch64_gen_far_branch (operands, 1, "Lcb", "<inv_cb>\\t%<w>0, ");
- else
- return "<cbz>\\t%<w>0, %l1";
- }
- [(set_attr "type" "branch")
- (set (attr "length")
- (if_then_else (and (ge (minus (match_dup 1) (pc))
- (const_int BRANCH_LEN_N_1MiB))
- (lt (minus (match_dup 1) (pc))
- (const_int BRANCH_LEN_P_1MiB)))
- (const_int 4)
- (const_int 8)))
- (set (attr "far_branch")
- (if_then_else (and (ge (minus (match_dup 2) (pc))
- (const_int BRANCH_LEN_N_1MiB))
- (lt (minus (match_dup 2) (pc))
- (const_int BRANCH_LEN_P_1MiB)))
- (const_string "no")
- (const_string "yes")))]
-)
-
-;; For an LT/GE comparison against zero, emit `TBZ`/`TBNZ`
-(define_insn "*aarch64_tbz<optab><mode>1"
- [(set (pc) (if_then_else (LTGE (match_operand:ALLI 0 "register_operand" "r")
- (const_int 0))
- (label_ref (match_operand 1))
- (pc)))
- (clobber (reg:CC CC_REGNUM))]
- "!aarch64_track_speculation"
- {
- if (get_attr_length (insn) == 8)
- {
- if (get_attr_far_branch (insn) == FAR_BRANCH_YES)
- return aarch64_gen_far_branch (operands, 1, "Ltb",
- "<inv_tb>\\t%<w>0, <sizem1>, ");
- else
- {
- char buf[64];
- uint64_t val = ((uint64_t) 1)
- << (GET_MODE_SIZE (<MODE>mode) * BITS_PER_UNIT - 1);
- sprintf (buf, "tst\t%%<w>0, %" PRId64, val);
- output_asm_insn (buf, operands);
- return "<bcond>\t%l1";
- }
- }
- else
- return "<tbz>\t%<w>0, <sizem1>, %l1";
- }
- [(set_attr "type" "branch")
- (set (attr "length")
- (if_then_else (and (ge (minus (match_dup 1) (pc))
- (const_int BRANCH_LEN_N_32KiB))
- (lt (minus (match_dup 1) (pc))
- (const_int BRANCH_LEN_P_32KiB)))
- (const_int 4)
- (const_int 8)))
- (set (attr "far_branch")
- (if_then_else (and (ge (minus (match_dup 1) (pc))
- (const_int BRANCH_LEN_N_1MiB))
- (lt (minus (match_dup 1) (pc))
- (const_int BRANCH_LEN_P_1MiB)))
- (const_string "no")
- (const_string "yes")))]
-)
-
;; -------------------------------------------------------------------
;; Test bit and branch
;; -------------------------------------------------------------------
diff --git a/gcc/testsuite/gcc.target/aarch64/cmpbr.c b/gcc/testsuite/gcc.target/aarch64/cmpbr.c
index 901c18dba4c..a9342aaf124 100644
--- a/gcc/testsuite/gcc.target/aarch64/cmpbr.c
+++ b/gcc/testsuite/gcc.target/aarch64/cmpbr.c
@@ -67,1172 +67,1173 @@ COMPARE_ALL(u16, i16, 65);
COMPARE_ALL(u32, i32, 65);
COMPARE_ALL(u64, i64, 65);
-// Comparisons against zero can use the wzr/xzr register.
+// Comparisons against zero can use CBZ/CBNZ, TBZ/TBNZ or CB<cond> against the
+// wzr/xzr register.
COMPARE_ALL(u8, i8, 0);
COMPARE_ALL(u16, i16, 0);
COMPARE_ALL(u32, i32, 0);
COMPARE_ALL(u64, i64, 0);
/*
** u8_x0_eq_x1:
** cbbeq w1, w0, .L4
** b not_taken
** b taken
*/
/*
** u8_x0_ne_x1:
** cbbeq w1, w0, .L6
** b taken
** b not_taken
*/
/*
** u8_x0_ult_x1:
** cbbls w1, w0, .L8
** b taken
** b not_taken
*/
/*
** u8_x0_ule_x1:
** cbbcc w1, w0, .L10
** b taken
** b not_taken
*/
/*
** u8_x0_ugt_x1:
** cbbcs w1, w0, .L12
** b taken
** b not_taken
*/
/*
** u8_x0_uge_x1:
** cbbhi w1, w0, .L14
** b taken
** b not_taken
*/
/*
** i8_x0_slt_x1:
** cbble w1, w0, .L16
** b taken
** b not_taken
*/
/*
** i8_x0_sle_x1:
** cbblt w1, w0, .L18
** b taken
** b not_taken
*/
/*
** i8_x0_sgt_x1:
** cbbge w1, w0, .L20
** b taken
** b not_taken
*/
/*
** i8_x0_sge_x1:
** cbbgt w1, w0, .L22
** b taken
** b not_taken
*/
/*
** u16_x0_eq_x1:
** cbheq w1, w0, .L25
** b not_taken
** b taken
*/
/*
** u16_x0_ne_x1:
** cbheq w1, w0, .L27
** b taken
** b not_taken
*/
/*
** u16_x0_ult_x1:
** cbhls w1, w0, .L29
** b taken
** b not_taken
*/
/*
** u16_x0_ule_x1:
** cbhcc w1, w0, .L31
** b taken
** b not_taken
*/
/*
** u16_x0_ugt_x1:
** cbhcs w1, w0, .L33
** b taken
** b not_taken
*/
/*
** u16_x0_uge_x1:
** cbhhi w1, w0, .L35
** b taken
** b not_taken
*/
/*
** i16_x0_slt_x1:
** cbhle w1, w0, .L37
** b taken
** b not_taken
*/
/*
** i16_x0_sle_x1:
** cbhlt w1, w0, .L39
** b taken
** b not_taken
*/
/*
** i16_x0_sgt_x1:
** cbhge w1, w0, .L41
** b taken
** b not_taken
*/
/*
** i16_x0_sge_x1:
** cbhgt w1, w0, .L43
** b taken
** b not_taken
*/
/*
** u32_x0_eq_x1:
** cbeq w0, w1, .L46
** b not_taken
** b taken
*/
/*
** u32_x0_ne_x1:
** cbeq w0, w1, .L48
** b taken
** b not_taken
*/
/*
** u32_x0_ult_x1:
** cbcs w0, w1, .L50
** b taken
** b not_taken
*/
/*
** u32_x0_ule_x1:
** cbhi w0, w1, .L52
** b taken
** b not_taken
*/
/*
** u32_x0_ugt_x1:
** cbls w0, w1, .L54
** b taken
** b not_taken
*/
/*
** u32_x0_uge_x1:
** cbcc w0, w1, .L56
** b taken
** b not_taken
*/
/*
** i32_x0_slt_x1:
** cbge w0, w1, .L58
** b taken
** b not_taken
*/
/*
** i32_x0_sle_x1:
** cbgt w0, w1, .L60
** b taken
** b not_taken
*/
/*
** i32_x0_sgt_x1:
** cble w0, w1, .L62
** b taken
** b not_taken
*/
/*
** i32_x0_sge_x1:
** cblt w0, w1, .L64
** b taken
** b not_taken
*/
/*
** u64_x0_eq_x1:
** cbeq x0, x1, .L67
** b not_taken
** b taken
*/
/*
** u64_x0_ne_x1:
** cbeq x0, x1, .L69
** b taken
** b not_taken
*/
/*
** u64_x0_ult_x1:
** cbcs x0, x1, .L71
** b taken
** b not_taken
*/
/*
** u64_x0_ule_x1:
** cbhi x0, x1, .L73
** b taken
** b not_taken
*/
/*
** u64_x0_ugt_x1:
** cbls x0, x1, .L75
** b taken
** b not_taken
*/
/*
** u64_x0_uge_x1:
** cbcc x0, x1, .L77
** b taken
** b not_taken
*/
/*
** i64_x0_slt_x1:
** cbge x0, x1, .L79
** b taken
** b not_taken
*/
/*
** i64_x0_sle_x1:
** cbgt x0, x1, .L81
** b taken
** b not_taken
*/
/*
** i64_x0_sgt_x1:
** cble x0, x1, .L83
** b taken
** b not_taken
*/
/*
** i64_x0_sge_x1:
** cblt x0, x1, .L85
** b taken
** b not_taken
*/
/*
** u32_x0_eq_42:
** cbeq w0, 42, .L88
** b not_taken
** b taken
*/
/*
** u32_x0_ne_42:
** cbeq w0, 42, .L90
** b taken
** b not_taken
*/
/*
** u32_x0_ult_42:
** cbhi w0, 41, .L92
** b taken
** b not_taken
*/
/*
** u32_x0_ule_42:
** cbhi w0, 42, .L94
** b taken
** b not_taken
*/
/*
** u32_x0_ugt_42:
** cbls w0, 42, .L96
** b taken
** b not_taken
*/
/*
** u32_x0_uge_42:
** cbls w0, 41, .L98
** b taken
** b not_taken
*/
/*
** i32_x0_slt_42:
** cbgt w0, 41, .L100
** b taken
** b not_taken
*/
/*
** i32_x0_sle_42:
** cbgt w0, 42, .L102
** b taken
** b not_taken
*/
/*
** i32_x0_sgt_42:
** cble w0, 42, .L104
** b taken
** b not_taken
*/
/*
** i32_x0_sge_42:
** cble w0, 41, .L106
** b taken
** b not_taken
*/
/*
** u64_x0_eq_42:
** cbeq x0, 42, .L109
** b not_taken
** b taken
*/
/*
** u64_x0_ne_42:
** cbeq x0, 42, .L111
** b taken
** b not_taken
*/
/*
** u64_x0_ult_42:
** cbhi x0, 41, .L113
** b taken
** b not_taken
*/
/*
** u64_x0_ule_42:
** cbhi x0, 42, .L115
** b taken
** b not_taken
*/
/*
** u64_x0_ugt_42:
** cbls x0, 42, .L117
** b taken
** b not_taken
*/
/*
** u64_x0_uge_42:
** cbls x0, 41, .L119
** b taken
** b not_taken
*/
/*
** i64_x0_slt_42:
** cbgt x0, 41, .L121
** b taken
** b not_taken
*/
/*
** i64_x0_sle_42:
** cbgt x0, 42, .L123
** b taken
** b not_taken
*/
/*
** i64_x0_sgt_42:
** cble x0, 42, .L125
** b taken
** b not_taken
*/
/*
** i64_x0_sge_42:
** cble x0, 41, .L127
** b taken
** b not_taken
*/
/*
** u8_x0_eq_42:
** mov w1, 42
** cbbeq w0, w1, .L130
** b not_taken
** b taken
*/
/*
** u8_x0_ne_42:
** mov w1, 42
** cbbeq w0, w1, .L132
** b taken
** b not_taken
*/
/*
** u8_x0_ult_42:
** mov w1, 41
** cbbhi w0, w1, .L134
** b taken
** b not_taken
*/
/*
** u8_x0_ule_42:
** mov w1, 42
** cbbhi w0, w1, .L136
** b taken
** b not_taken
*/
/*
** u8_x0_ugt_42:
** mov w1, 42
** cbbls w0, w1, .L138
** b taken
** b not_taken
*/
/*
** u8_x0_uge_42:
** mov w1, 41
** cbbls w0, w1, .L140
** b taken
** b not_taken
*/
/*
** i8_x0_slt_42:
** mov w1, 41
** cbbgt w0, w1, .L142
** b taken
** b not_taken
*/
/*
** i8_x0_sle_42:
** mov w1, 42
** cbbgt w0, w1, .L144
** b taken
** b not_taken
*/
/*
** i8_x0_sgt_42:
** mov w1, 42
** cbble w0, w1, .L146
** b taken
** b not_taken
*/
/*
** i8_x0_sge_42:
** mov w1, 41
** cbble w0, w1, .L148
** b taken
** b not_taken
*/
/*
** u16_x0_eq_42:
** mov w1, 42
** cbheq w0, w1, .L151
** b not_taken
** b taken
*/
/*
** u16_x0_ne_42:
** mov w1, 42
** cbheq w0, w1, .L153
** b taken
** b not_taken
*/
/*
** u16_x0_ult_42:
** mov w1, 41
** cbhhi w0, w1, .L155
** b taken
** b not_taken
*/
/*
** u16_x0_ule_42:
** mov w1, 42
** cbhhi w0, w1, .L157
** b taken
** b not_taken
*/
/*
** u16_x0_ugt_42:
** mov w1, 42
** cbhls w0, w1, .L159
** b taken
** b not_taken
*/
/*
** u16_x0_uge_42:
** mov w1, 41
** cbhls w0, w1, .L161
** b taken
** b not_taken
*/
/*
** i16_x0_slt_42:
** mov w1, 41
** cbhgt w0, w1, .L163
** b taken
** b not_taken
*/
/*
** i16_x0_sle_42:
** mov w1, 42
** cbhgt w0, w1, .L165
** b taken
** b not_taken
*/
/*
** i16_x0_sgt_42:
** mov w1, 42
** cbhle w0, w1, .L167
** b taken
** b not_taken
*/
/*
** i16_x0_sge_42:
** mov w1, 41
** cbhle w0, w1, .L169
** b taken
** b not_taken
*/
/*
** u8_x0_eq_65:
** mov w1, 65
** cbbeq w0, w1, .L172
** b not_taken
** b taken
*/
/*
** u8_x0_ne_65:
** mov w1, 65
** cbbeq w0, w1, .L174
** b taken
** b not_taken
*/
/*
** u8_x0_ult_65:
** mov w1, 64
** cbbhi w0, w1, .L176
** b taken
** b not_taken
*/
/*
** u8_x0_ule_65:
** mov w1, 65
** cbbhi w0, w1, .L178
** b taken
** b not_taken
*/
/*
** u8_x0_ugt_65:
** mov w1, 65
** cbbls w0, w1, .L180
** b taken
** b not_taken
*/
/*
** u8_x0_uge_65:
** mov w1, 64
** cbbls w0, w1, .L182
** b taken
** b not_taken
*/
/*
** i8_x0_slt_65:
** mov w1, 64
** cbbgt w0, w1, .L184
** b taken
** b not_taken
*/
/*
** i8_x0_sle_65:
** mov w1, 65
** cbbgt w0, w1, .L186
** b taken
** b not_taken
*/
/*
** i8_x0_sgt_65:
** mov w1, 65
** cbble w0, w1, .L188
** b taken
** b not_taken
*/
/*
** i8_x0_sge_65:
** mov w1, 64
** cbble w0, w1, .L190
** b taken
** b not_taken
*/
/*
** u16_x0_eq_65:
** mov w1, 65
** cbheq w0, w1, .L193
** b not_taken
** b taken
*/
/*
** u16_x0_ne_65:
** mov w1, 65
** cbheq w0, w1, .L195
** b taken
** b not_taken
*/
/*
** u16_x0_ult_65:
** mov w1, 64
** cbhhi w0, w1, .L197
** b taken
** b not_taken
*/
/*
** u16_x0_ule_65:
** mov w1, 65
** cbhhi w0, w1, .L199
** b taken
** b not_taken
*/
/*
** u16_x0_ugt_65:
** mov w1, 65
** cbhls w0, w1, .L201
** b taken
** b not_taken
*/
/*
** u16_x0_uge_65:
** mov w1, 64
** cbhls w0, w1, .L203
** b taken
** b not_taken
*/
/*
** i16_x0_slt_65:
** mov w1, 64
** cbhgt w0, w1, .L205
** b taken
** b not_taken
*/
/*
** i16_x0_sle_65:
** mov w1, 65
** cbhgt w0, w1, .L207
** b taken
** b not_taken
*/
/*
** i16_x0_sgt_65:
** mov w1, 65
** cbhle w0, w1, .L209
** b taken
** b not_taken
*/
/*
** i16_x0_sge_65:
** mov w1, 64
** cbhle w0, w1, .L211
** b taken
** b not_taken
*/
/*
** u32_x0_eq_65:
** cmp w0, 65
** beq .L214
** b not_taken
** b taken
*/
/*
** u32_x0_ne_65:
** cmp w0, 65
** beq .L216
** b taken
** b not_taken
*/
/*
** u32_x0_ult_65:
** cmp w0, 64
** bhi .L218
** b taken
** b not_taken
*/
/*
** u32_x0_ule_65:
** cmp w0, 65
** bhi .L220
** b taken
** b not_taken
*/
/*
** u32_x0_ugt_65:
** cmp w0, 65
** bls .L222
** b taken
** b not_taken
*/
/*
** u32_x0_uge_65:
** cmp w0, 64
** bls .L224
** b taken
** b not_taken
*/
/*
** i32_x0_slt_65:
** cmp w0, 64
** bgt .L226
** b taken
** b not_taken
*/
/*
** i32_x0_sle_65:
** cmp w0, 65
** bgt .L228
** b taken
** b not_taken
*/
/*
** i32_x0_sgt_65:
** cmp w0, 65
** ble .L230
** b taken
** b not_taken
*/
/*
** i32_x0_sge_65:
** cmp w0, 64
** ble .L232
** b taken
** b not_taken
*/
/*
** u64_x0_eq_65:
** cmp x0, 65
** beq .L235
** b not_taken
** b taken
*/
/*
** u64_x0_ne_65:
** cmp x0, 65
** beq .L237
** b taken
** b not_taken
*/
/*
** u64_x0_ult_65:
** cmp x0, 64
** bhi .L239
** b taken
** b not_taken
*/
/*
** u64_x0_ule_65:
** cmp x0, 65
** bhi .L241
** b taken
** b not_taken
*/
/*
** u64_x0_ugt_65:
** cmp x0, 65
** bls .L243
** b taken
** b not_taken
*/
/*
** u64_x0_uge_65:
** cmp x0, 64
** bls .L245
** b taken
** b not_taken
*/
/*
** i64_x0_slt_65:
** cmp x0, 64
** bgt .L247
** b taken
** b not_taken
*/
/*
** i64_x0_sle_65:
** cmp x0, 65
** bgt .L249
** b taken
** b not_taken
*/
/*
** i64_x0_sgt_65:
** cmp x0, 65
** ble .L251
** b taken
** b not_taken
*/
/*
** i64_x0_sge_65:
** cmp x0, 64
** ble .L253
** b taken
** b not_taken
*/
/*
** u8_x0_eq_0:
** cbbne w0, wzr, .L255
** b taken
** b not_taken
*/
/*
** u8_x0_ne_0:
** cbbeq w0, wzr, .L257
** b taken
** b not_taken
*/
/*
** u8_x0_ult_0:
** b not_taken
*/
/*
** u8_x0_ule_0:
** cbbne w0, wzr, .L260
** b taken
** b not_taken
*/
/*
** u8_x0_ugt_0:
** cbbeq w0, wzr, .L262
** b taken
** b not_taken
*/
/*
** u8_x0_uge_0:
** b taken
*/
/*
** i8_x0_slt_0:
-** cbblt w0, wzr, .L266
+** tbnz w0, #7, .L266
** b not_taken
** b taken
*/
/*
** i8_x0_sle_0:
** cbble w0, wzr, .L269
** b not_taken
** b taken
*/
/*
** i8_x0_sgt_0:
** cbble w0, wzr, .L271
** b taken
** b not_taken
*/
/*
** i8_x0_sge_0:
-** cbblt w0, wzr, .L273
+** tbnz w0, #7, .L273
** b taken
** b not_taken
*/
/*
** u16_x0_eq_0:
** cbhne w0, wzr, .L275
** b taken
** b not_taken
*/
/*
** u16_x0_ne_0:
** cbheq w0, wzr, .L277
** b taken
** b not_taken
*/
/*
** u16_x0_ult_0:
** b not_taken
*/
/*
** u16_x0_ule_0:
** cbhne w0, wzr, .L280
** b taken
** b not_taken
*/
/*
** u16_x0_ugt_0:
** cbheq w0, wzr, .L282
** b taken
** b not_taken
*/
/*
** u16_x0_uge_0:
** b taken
*/
/*
** i16_x0_slt_0:
-** cbhlt w0, wzr, .L286
+** tbnz w0, #15, .L286
** b not_taken
** b taken
*/
/*
** i16_x0_sle_0:
** cbhle w0, wzr, .L289
** b not_taken
** b taken
*/
/*
** i16_x0_sgt_0:
** cbhle w0, wzr, .L291
** b taken
** b not_taken
*/
/*
** i16_x0_sge_0:
-** cbhlt w0, wzr, .L293
+** tbnz w0, #15, .L293
** b taken
** b not_taken
*/
/*
** u32_x0_eq_0:
-** cbne w0, wzr, .L295
+** cbnz w0, .L295
** b taken
** b not_taken
*/
/*
** u32_x0_ne_0:
-** cbeq w0, wzr, .L297
+** cbz w0, .L297
** b taken
** b not_taken
*/
/*
** u32_x0_ult_0:
** b not_taken
*/
/*
** u32_x0_ule_0:
-** cbne w0, wzr, .L300
+** cbnz w0, .L300
** b taken
** b not_taken
*/
/*
** u32_x0_ugt_0:
-** cbeq w0, wzr, .L302
+** cbz w0, .L302
** b taken
** b not_taken
*/
/*
** u32_x0_uge_0:
** b taken
*/
/*
** i32_x0_slt_0:
-** cblt w0, wzr, .L306
+** tbnz w0, #31, .L306
** b not_taken
** b taken
*/
/*
** i32_x0_sle_0:
** cble w0, wzr, .L309
** b not_taken
** b taken
*/
/*
** i32_x0_sgt_0:
** cble w0, wzr, .L311
** b taken
** b not_taken
*/
/*
** i32_x0_sge_0:
-** cblt w0, wzr, .L313
+** tbnz w0, #31, .L313
** b taken
** b not_taken
*/
/*
** u64_x0_eq_0:
-** cbne x0, xzr, .L315
+** cbnz x0, .L315
** b taken
** b not_taken
*/
/*
** u64_x0_ne_0:
-** cbeq x0, xzr, .L317
+** cbz x0, .L317
** b taken
** b not_taken
*/
/*
** u64_x0_ult_0:
** b not_taken
*/
/*
** u64_x0_ule_0:
-** cbne x0, xzr, .L320
+** cbnz x0, .L320
** b taken
** b not_taken
*/
/*
** u64_x0_ugt_0:
-** cbeq x0, xzr, .L322
+** cbz x0, .L322
** b taken
** b not_taken
*/
/*
** u64_x0_uge_0:
** b taken
*/
/*
** i64_x0_slt_0:
-** cblt x0, xzr, .L326
+** tbnz x0, #63, .L326
** b not_taken
** b taken
*/
/*
** i64_x0_sle_0:
** cble x0, xzr, .L329
** b not_taken
** b taken
*/
/*
** i64_x0_sgt_0:
** cble x0, xzr, .L331
** b taken
** b not_taken
*/
/*
** i64_x0_sge_0:
-** cblt x0, xzr, .L333
+** tbnz x0, #63, .L333
** b taken
** b not_taken
*/
--
2.45.2