From: "David A. Long" <dave.l...@linaro.org>

Certain instructions are hard to execute correctly out-of-line (as is
done in kprobes).  Test functions are added to insn.[hc] to identify
these.  The instructions include any that use PC-relative addressing,
change the PC, or change interrupt masking.  For efficiency and
simplicity, test functions are also added for small collections of
related instructions.

Signed-off-by: David A. Long <dave.l...@linaro.org>
---
 arch/arm64/include/asm/insn.h | 36 ++++++++++++++++++++++++++++++++++++
 arch/arm64/kernel/insn.c      | 34 ++++++++++++++++++++++++++++++++++
 2 files changed, 70 insertions(+)
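
Not part of the patch itself: a minimal sketch of how a later consumer
(e.g. the kprobes instruction decoder) might combine the new predicates
when deciding whether an instruction can be stepped out-of-line.  The
function name and the exact reject policy below are hypothetical; only
the aarch64_insn_* helpers come from this patch.

#include <asm/insn.h>

static bool can_probe_out_of_line(u32 insn)
{
        /* PC-relative loads and address generation must be simulated. */
        if (aarch64_insn_uses_literal(insn))
                return false;

        /* Anything that changes the PC cannot run from an XOL slot. */
        if (aarch64_insn_is_branch(insn) || aarch64_insn_is_eret(insn))
                return false;

        /* Exception generation and DAIF-changing MSRs are rejected too. */
        if (aarch64_insn_is_exception(insn) || aarch64_insn_is_msr_imm(insn))
                return false;

        return true;
}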

diff --git a/arch/arm64/include/asm/insn.h b/arch/arm64/include/asm/insn.h
index 30e50eb..497f7a2 100644
--- a/arch/arm64/include/asm/insn.h
+++ b/arch/arm64/include/asm/insn.h
@@ -120,6 +120,29 @@ enum aarch64_insn_register {
        AARCH64_INSN_REG_SP = 31  /* Stack pointer: as load/store base reg */
 };
 
+enum aarch64_insn_special_register {
+       AARCH64_INSN_SPCLREG_SPSR_EL1   = 0xC200,
+       AARCH64_INSN_SPCLREG_ELR_EL1    = 0xC201,
+       AARCH64_INSN_SPCLREG_SP_EL0     = 0xC208,
+       AARCH64_INSN_SPCLREG_SPSEL      = 0xC210,
+       AARCH64_INSN_SPCLREG_CURRENTEL  = 0xC212,
+       AARCH64_INSN_SPCLREG_DAIF       = 0xDA11,
+       AARCH64_INSN_SPCLREG_NZCV       = 0xDA10,
+       AARCH64_INSN_SPCLREG_FPCR       = 0xDA20,
+       AARCH64_INSN_SPCLREG_DSPSR_EL0  = 0xDA28,
+       AARCH64_INSN_SPCLREG_DLR_EL0    = 0xDA29,
+       AARCH64_INSN_SPCLREG_SPSR_EL2   = 0xE200,
+       AARCH64_INSN_SPCLREG_ELR_EL2    = 0xE201,
+       AARCH64_INSN_SPCLREG_SP_EL1     = 0xE208,
+       AARCH64_INSN_SPCLREG_SPSR_INQ   = 0xE218,
+       AARCH64_INSN_SPCLREG_SPSR_ABT   = 0xE219,
+       AARCH64_INSN_SPCLREG_SPSR_UND   = 0xE21A,
+       AARCH64_INSN_SPCLREG_SPSR_FIQ   = 0xE21B,
+       AARCH64_INSN_SPCLREG_SPSR_EL3   = 0xF200,
+       AARCH64_INSN_SPCLREG_ELR_EL3    = 0xF201,
+       AARCH64_INSN_SPCLREG_SP_EL2     = 0xF210
+};
+
 enum aarch64_insn_variant {
        AARCH64_INSN_VARIANT_32BIT,
        AARCH64_INSN_VARIANT_64BIT
@@ -223,8 +246,13 @@ static __always_inline bool aarch64_insn_is_##abbr(u32 code) \
 static __always_inline u32 aarch64_insn_get_##abbr##_value(void) \
 { return (val); }
 
+__AARCH64_INSN_FUNCS(adr_adrp, 0x1F000000, 0x10000000)
+__AARCH64_INSN_FUNCS(prfm_lit, 0xFF000000, 0xD8000000)
 __AARCH64_INSN_FUNCS(str_reg,  0x3FE0EC00, 0x38206800)
 __AARCH64_INSN_FUNCS(ldr_reg,  0x3FE0EC00, 0x38606800)
+__AARCH64_INSN_FUNCS(ldr_lit,  0xBF000000, 0x18000000)
+__AARCH64_INSN_FUNCS(ldrsw_lit,        0xFF000000, 0x98000000)
+__AARCH64_INSN_FUNCS(exclusive,        0x3F800000, 0x08000000)
 __AARCH64_INSN_FUNCS(stp_post, 0x7FC00000, 0x28800000)
 __AARCH64_INSN_FUNCS(ldp_post, 0x7FC00000, 0x28C00000)
 __AARCH64_INSN_FUNCS(stp_pre,  0x7FC00000, 0x29800000)
@@ -273,10 +301,15 @@ __AARCH64_INSN_FUNCS(svc, 0xFFE0001F, 0xD4000001)
 __AARCH64_INSN_FUNCS(hvc,      0xFFE0001F, 0xD4000002)
 __AARCH64_INSN_FUNCS(smc,      0xFFE0001F, 0xD4000003)
 __AARCH64_INSN_FUNCS(brk,      0xFFE0001F, 0xD4200000)
+__AARCH64_INSN_FUNCS(exception,        0xFF000000, 0xD4000000)
 __AARCH64_INSN_FUNCS(hint,     0xFFFFF01F, 0xD503201F)
 __AARCH64_INSN_FUNCS(br,       0xFFFFFC1F, 0xD61F0000)
 __AARCH64_INSN_FUNCS(blr,      0xFFFFFC1F, 0xD63F0000)
 __AARCH64_INSN_FUNCS(ret,      0xFFFFFC1F, 0xD65F0000)
+__AARCH64_INSN_FUNCS(eret,     0xFFFFFFFF, 0xD69F03E0)
+__AARCH64_INSN_FUNCS(mrs,      0xFFF00000, 0xD5300000)
+__AARCH64_INSN_FUNCS(msr_imm,  0xFFF8F01F, 0xD500401F)
+__AARCH64_INSN_FUNCS(msr_reg,  0xFFF00000, 0xD5100000)
 
 #undef __AARCH64_INSN_FUNCS
 
@@ -286,6 +319,8 @@ bool aarch64_insn_is_branch_imm(u32 insn);
 int aarch64_insn_read(void *addr, u32 *insnp);
 int aarch64_insn_write(void *addr, u32 insn);
 enum aarch64_insn_encoding_class aarch64_get_insn_class(u32 insn);
+bool aarch64_insn_uses_literal(u32 insn);
+bool aarch64_insn_is_branch(u32 insn);
 u64 aarch64_insn_decode_immediate(enum aarch64_insn_imm_type type, u32 insn);
 u32 aarch64_insn_encode_immediate(enum aarch64_insn_imm_type type,
                                  u32 insn, u64 imm);
@@ -367,6 +402,7 @@ bool aarch32_insn_is_wide(u32 insn);
 #define A32_RT_OFFSET  12
 #define A32_RT2_OFFSET  0
 
+u32 aarch64_insn_extract_system_reg(u32 insn);
 u32 aarch32_insn_extract_reg_num(u32 insn, int offset);
 u32 aarch32_insn_mcr_extract_opc2(u32 insn);
 u32 aarch32_insn_mcr_extract_crm(u32 insn);
diff --git a/arch/arm64/kernel/insn.c b/arch/arm64/kernel/insn.c
index 368c082..28c6110f 100644
--- a/arch/arm64/kernel/insn.c
+++ b/arch/arm64/kernel/insn.c
@@ -162,6 +162,32 @@ static bool __kprobes __aarch64_insn_hotpatch_safe(u32 insn)
                aarch64_insn_is_nop(insn);
 }
 
+bool __kprobes aarch64_insn_uses_literal(u32 insn)
+{
+       /* ldr/ldrsw (literal), prfm (literal), adr/adrp */
+
+       return aarch64_insn_is_ldr_lit(insn) ||
+               aarch64_insn_is_ldrsw_lit(insn) ||
+               aarch64_insn_is_adr_adrp(insn) ||
+               aarch64_insn_is_prfm_lit(insn);
+}
+
+bool __kprobes aarch64_insn_is_branch(u32 insn)
+{
+       /* b, bl, cb*, tb*, b.cond, br, blr, ret */
+
+       return aarch64_insn_is_b(insn) ||
+               aarch64_insn_is_bl(insn) ||
+               aarch64_insn_is_cbz(insn) ||
+               aarch64_insn_is_cbnz(insn) ||
+               aarch64_insn_is_tbz(insn) ||
+               aarch64_insn_is_tbnz(insn) ||
+               aarch64_insn_is_ret(insn) ||
+               aarch64_insn_is_br(insn) ||
+               aarch64_insn_is_blr(insn) ||
+               aarch64_insn_is_bcond(insn);
+}
+
 /*
  * ARM Architecture Reference Manual for ARMv8 Profile-A, Issue A.a
  * Section B2.6.5 "Concurrent modification and execution of instructions":
@@ -1175,6 +1201,14 @@ u32 aarch64_set_branch_offset(u32 insn, s32 offset)
        BUG();
 }
 
+/*
+ * Extract the Op/CR data from a msr/mrs instruction.
+ */
+u32 aarch64_insn_extract_system_reg(u32 insn)
+{
+       return (insn & 0x1FFFE0) >> 5;
+}
+
 bool aarch32_insn_is_wide(u32 insn)
 {
        return insn >= 0xe800;
-- 
2.5.0
