x86: use optimal NOPs to fill the SMEP/SMAP placeholders
Alternatives patching code picks the most suitable NOPs for the
running system, so simply use it to replace the pre-populated ones.
Use an arbitrary, always available feature to key off from, but
hide this behind the new X86_FEATURE_ALWAYS.
Signed-off-by: Jan Beulich <jbeulich@suse.com>
---
v2: Introduce and use X86_FEATURE_ALWAYS.
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -185,6 +185,7 @@ ENTRY(compat_restore_all_guest)
mov %rax, %cr4
.Lcr4_alt_end:
.section .altinstructions, "a"
+altinstruction_entry .Lcr4_orig, .Lcr4_orig, X86_FEATURE_ALWAYS, 12, 0
altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_SMEP, 12, \
(.Lcr4_alt_end - .Lcr4_alt)
altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_SMAP, 12, \
--- a/xen/include/asm-x86/asm_defns.h
+++ b/xen/include/asm-x86/asm_defns.h
@@ -204,6 +204,7 @@ void ret_from_intr(void);
662: __ASM_##op; \
.popsection; \
.pushsection .altinstructions, "a";\
+altinstruction_entry 661b, 661b, X86_FEATURE_ALWAYS, 3, 0; \
altinstruction_entry 661b, 662b, X86_FEATURE_SMAP, 3, 3; \
.popsection
@@ -215,6 +216,7 @@ void ret_from_intr(void);
.pushsection .altinstr_replacement, "ax"; \
668: call cr4_pv32_restore;\
.section .altinstructions, "a";\
+altinstruction_entry 667b, 667b, X86_FEATURE_ALWAYS, 5, 0; \
altinstruction_entry 667b, 668b, X86_FEATURE_SMEP, 5, 5; \
altinstruction_entry 667b, 668b, X86_FEATURE_SMAP, 5, 5; \
.popsection
--- a/xen/include/asm-x86/cpufeature.h
+++ b/xen/include/asm-x86/cpufeature.h
@@ -162,6 +162,9 @@
#define cpufeat_bit(idx) ((idx) % 32)
#define cpufeat_mask(idx) (_AC(1, U) << cpufeat_bit(idx))
+/* An alias of a feature we know is always going to be present. */
+#define X86_FEATURE_ALWAYS X86_FEATURE_LM
+
#if !defined(__ASSEMBLY__) && !defined(X86_FEATURES_ONLY)
#include <xen/bitops.h>
_______________________________________________
Xen-devel mailing list
Xen-devel@lists.xen.org
http://lists.xen.org/xen-devel