Add ix86_any_return_p to also check for a simple_return wrapped in a PARALLEL, so that patterns like the following are handled:
(jump_insn 39 38 40 5 (parallel [
            (simple_return)
            (unspec [
                    (const_int 0 [0])
                ] UNSPEC_SIMPLE_RETURN)
        ]) "/tmp/x.c":105 -1
     (nil)
 -> simple_return)
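
With the new predicate, callers can treat the PARALLEL form above like any
other return.  A minimal usage sketch (bb and handle_short_return are
placeholders for illustration only, not part of this patch):

    rtx_insn *ret = BB_END (bb);   /* last insn of some basic block */
    if (JUMP_P (ret) && ix86_any_return_p (PATTERN (ret)))
      /* Matches (return), (simple_return) and the PARALLEL form above.  */
      handle_short_return (ret);   /* hypothetical follow-up action */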
* config/i386/i386-expand.c (ix86_notrack_prefixed_insn_p):
Replace ANY_RETURN_P with ix86_any_return_p.
* config/i386/i386-features.c (rest_of_insert_endbranch):
Likewise.
	* config/i386/i386-protos.h (ix86_any_return_p): New.
* config/i386/i386.c (ix86_any_return_p): New function.
(ix86_pad_returns): Replace ANY_RETURN_P with ix86_any_return_p.
(ix86_count_insn_bb): Likewise.
(ix86_pad_short_function): Likewise.
---
gcc/config/i386/i386-expand.c | 2 +-
gcc/config/i386/i386-features.c | 2 +-
gcc/config/i386/i386-protos.h | 2 ++
gcc/config/i386/i386.c | 17 ++++++++++++++---
4 files changed, 18 insertions(+), 5 deletions(-)
diff --git a/gcc/config/i386/i386-expand.c b/gcc/config/i386/i386-expand.c
index 371bbedd9a7..6dcd4554424 100644
--- a/gcc/config/i386/i386-expand.c
+++ b/gcc/config/i386/i386-expand.c
@@ -20143,7 +20143,7 @@ ix86_notrack_prefixed_insn_p (rtx_insn *insn)
if (JUMP_P (insn) && !flag_cet_switch)
{
rtx target = JUMP_LABEL (insn);
- if (target == NULL_RTX || ANY_RETURN_P (target))
+ if (target == NULL_RTX || ix86_any_return_p (target))
return false;
/* Check the jump is a switch table. */
diff --git a/gcc/config/i386/i386-features.c b/gcc/config/i386/i386-features.c
index 78fb373db6e..a1dd5dee42f 100644
--- a/gcc/config/i386/i386-features.c
+++ b/gcc/config/i386/i386-features.c
@@ -2030,7 +2030,7 @@ rest_of_insert_endbranch (void)
if (JUMP_P (insn) && flag_cet_switch)
{
rtx target = JUMP_LABEL (insn);
- if (target == NULL_RTX || ANY_RETURN_P (target))
+ if (target == NULL_RTX || ix86_any_return_p (target))
continue;
/* Check the jump is a switch table. */
diff --git a/gcc/config/i386/i386-protos.h b/gcc/config/i386/i386-protos.h
index 01732a225f4..5e6a07a6b60 100644
--- a/gcc/config/i386/i386-protos.h
+++ b/gcc/config/i386/i386-protos.h
@@ -210,6 +210,8 @@ extern void ix86_move_vector_high_sse_to_mmx (rtx);
extern void ix86_split_mmx_pack (rtx[], enum rtx_code);
extern void ix86_split_mmx_punpck (rtx[], bool);
+extern bool ix86_any_return_p (rtx);
+
#ifdef TREE_CODE
extern void init_cumulative_args (CUMULATIVE_ARGS *, tree, rtx, tree, int);
#endif /* TREE_CODE */
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index d433c3d33f2..80d0cfe96d2 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -20632,6 +20632,17 @@ ix86_avoid_jump_mispredicts (void)
}
#endif
+/* Return true if RET is a return, simple_return or simple_return in
+ a PARALLEL. */
+
+bool
+ix86_any_return_p (rtx ret)
+{
+ return (ANY_RETURN_P (ret)
+ || (GET_CODE (ret) == PARALLEL
+ && GET_CODE (XVECEXP (ret, 0, 0)) == SIMPLE_RETURN));
+}
+
/* AMD Athlon works faster
when RET is not destination of conditional jump or directly preceded
by other jump instruction. We avoid the penalty by inserting NOP just
@@ -20649,7 +20660,7 @@ ix86_pad_returns (void)
rtx_insn *prev;
bool replace = false;
- if (!JUMP_P (ret) || !ANY_RETURN_P (PATTERN (ret))
+ if (!JUMP_P (ret) || !ix86_any_return_p (PATTERN (ret))
|| optimize_bb_for_size_p (bb))
continue;
for (prev = PREV_INSN (ret); prev; prev = PREV_INSN (prev))
@@ -20703,7 +20714,7 @@ ix86_count_insn_bb (basic_block bb)
{
/* Only happen in exit blocks. */
if (JUMP_P (insn)
- && ANY_RETURN_P (PATTERN (insn)))
+ && ix86_any_return_p (PATTERN (insn)))
break;
if (NONDEBUG_INSN_P (insn)
@@ -20776,7 +20787,7 @@ ix86_pad_short_function (void)
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
{
rtx_insn *ret = BB_END (e->src);
- if (JUMP_P (ret) && ANY_RETURN_P (PATTERN (ret)))
+ if (JUMP_P (ret) && ix86_any_return_p (PATTERN (ret)))
{
int insn_count = ix86_count_insn (e->src);
--
2.26.2