On Mon, 3 Feb 2020 at 14:47, Richard Henderson
<richard.hender...@linaro.org> wrote:
>
> Split this helper out of msr_mask in translate.c. At the same time,
> transform the negative reductive logic to positive accumulative logic.
> It will be usable along the exception paths.
>
> Signed-off-by: Richard Henderson <richard.hender...@linaro.org>
> ---
>  target/arm/internals.h | 24 ++++++++++++++++++++++++
>  target/arm/translate.c | 17 +++--------------
>  2 files changed, 27 insertions(+), 14 deletions(-)
>
> diff --git a/target/arm/internals.h b/target/arm/internals.h
> index 6be8b2d1a9..0569c96fd9 100644
> --- a/target/arm/internals.h
> +++ b/target/arm/internals.h
> @@ -1061,6 +1061,30 @@ static inline bool arm_mmu_idx_is_stage1_of_2(ARMMMUIdx mmu_idx)
>      }
>  }
>
> +static inline uint32_t aarch32_cpsr_valid_mask(uint64_t features,
> +                                               const ARMISARegisters *id)
> +{
> +    uint32_t valid = CPSR_M | CPSR_AIF | CPSR_IL | CPSR_NZCV;
> +
> +    if ((features >> ARM_FEATURE_V4T) & 1) {
> +        valid |= CPSR_T;
> +    }
> +    if ((features >> ARM_FEATURE_V5) & 1) {
> +        valid |= CPSR_Q; /* V5TE in reality*/
> +    }
> +    if ((features >> ARM_FEATURE_V6) & 1) {
> +        valid |= CPSR_E | CPSR_GE;
> +    }
> +    if ((features >> ARM_FEATURE_THUMB2) & 1) {
> +        valid |= CPSR_IT;
> +    }
> +    if (isar_feature_jazelle(id)) {
> +        valid |= CPSR_J;
> +    }

This is a behaviour change rather than just a refactoring: we used to
unconditionally allow the J bit through, and now we only do so if the
isar feature bit is set.
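To make that concrete, here is a minimal before/after sketch of just the
J-bit handling (illustration only: the two halves are juxtaposed from the
old msr_mask() code and the new helper, not a contiguous snippet from
either file):

    /* Before: nothing in the "mask out undefined bits" section cleared
     * CPSR_J, so the J bit was allowed through on every CPU.
     */
    mask &= ~CPSR_RESERVED;             /* CPSR_J survives this */

    /* After: CPSR_J only becomes part of the writable mask when the
     * ID registers actually advertise Jazelle.
     */
    if (isar_feature_jazelle(id)) {
        valid |= CPSR_J;
    }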
> +    return valid;
> +}
> +
>  /*
>   * Parameters of a given virtual address, as extracted from the
>   * translation control register (TCR) for a given regime.
> diff --git a/target/arm/translate.c b/target/arm/translate.c
> index d58c328e08..032f7074cb 100644
> --- a/target/arm/translate.c
> +++ b/target/arm/translate.c
> @@ -2747,22 +2747,11 @@ static uint32_t msr_mask(DisasContext *s, int flags, int spsr)
>          mask |= 0xff000000;
>
>      /* Mask out undefined bits. */
> -    mask &= ~CPSR_RESERVED;
> -    if (!arm_dc_feature(s, ARM_FEATURE_V4T)) {
> -        mask &= ~CPSR_T;
> -    }
> -    if (!arm_dc_feature(s, ARM_FEATURE_V5)) {
> -        mask &= ~CPSR_Q; /* V5TE in reality*/
> -    }
> -    if (!arm_dc_feature(s, ARM_FEATURE_V6)) {
> -        mask &= ~(CPSR_E | CPSR_GE);
> -    }
> -    if (!arm_dc_feature(s, ARM_FEATURE_THUMB2)) {
> -        mask &= ~CPSR_IT;
> -    }
> +    mask &= aarch32_cpsr_valid_mask(s->features, s->isar);
> +
>      /* Mask out execution state and reserved bits. */

This comment no longer matches the code it's referring to.

>      if (!spsr) {
> -        mask &= ~(CPSR_EXEC | CPSR_RESERVED);
> +        mask &= ~CPSR_EXEC;
>      }
>      /* Mask out privileged bits. */
>      if (IS_USER(s))
> --

Otherwise
Reviewed-by: Peter Maydell <peter.mayd...@linaro.org>

thanks
-- PMM