[Xen-changelog] [xen master] x86: use optimal NOPs to fill the SMEP/SMAP placeholders
commit 01a0bd0a7d72be638a359db3f8cf551123467d29
Author: Jan Beulich <jbeulich@xxxxxxxx>
AuthorDate: Fri May 13 18:13:54 2016 +0100
Commit: Andrew Cooper <andrew.cooper3@xxxxxxxxxx>
CommitDate: Fri May 13 18:15:55 2016 +0100
x86: use optimal NOPs to fill the SMEP/SMAP placeholders
Alternatives patching code picks the most suitable NOPs for the
running system, so simply use it to replace the pre-populated ones.
Use an arbitrary, always available feature to key off from, but
hide this behind the new X86_FEATURE_ALWAYS.
Signed-off-by: Jan Beulich <jbeulich@xxxxxxxx>
Reviewed-by: Andrew Cooper <andrew.cooper3@xxxxxxxxxx>
Release-acked-by: Wei Liu <wei.liu2@xxxxxxxxxx>
---
xen/arch/x86/x86_64/compat/entry.S | 8 ++------
xen/include/asm-x86/asm_defns.h | 2 ++
xen/include/asm-x86/cpufeature.h | 3 +++
3 files changed, 7 insertions(+), 6 deletions(-)
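For background on what "optimal NOPs" means here: when the alternatives patcher leaves (part of) a site unpatched, it fills the padding with the longest NOP encodings suited to the running CPU rather than a run of single-byte 0x90s. Below is a minimal, self-contained C sketch of that idea; it uses the well-known "0F 1F /0" long-NOP encodings, and the function and table names are illustrative rather than Xen's actual symbols.

#include <stdio.h>
#include <string.h>

/* Standard "0F 1F /0" long-NOP encodings, one row per length 1..8. */
static const unsigned char nops[][8] = {
    { 0x90 },
    { 0x66, 0x90 },
    { 0x0f, 0x1f, 0x00 },
    { 0x0f, 0x1f, 0x40, 0x00 },
    { 0x0f, 0x1f, 0x44, 0x00, 0x00 },
    { 0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00 },
    { 0x0f, 0x1f, 0x80, 0x00, 0x00, 0x00, 0x00 },
    { 0x0f, 0x1f, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00 },
};

/* Fill a patch site with the longest NOPs available, greedily. */
static void fill_nops(unsigned char *site, unsigned int len)
{
    while ( len )
    {
        unsigned int chunk = len < 8 ? len : 8;

        memcpy(site, nops[chunk - 1], chunk);
        site += chunk;
        len -= chunk;
    }
}

int main(void)
{
    /* Combined size of the ASM_NOPn placeholders removed below
     * (8 + 2 + 8 + 6 + 8 + 3 = 35 bytes). */
    unsigned char site[35];
    unsigned int i;

    fill_nops(site, sizeof(site));
    for ( i = 0; i < sizeof(site); i++ )
        printf("%02x%s", site[i], (i + 1) % 8 ? " " : "\n");
    printf("\n");
    return 0;
}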
diff --git a/xen/arch/x86/x86_64/compat/entry.S b/xen/arch/x86/x86_64/compat/entry.S
index 52518c5..2723455 100644
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -175,12 +175,7 @@ compat_bad_hypercall:
 ENTRY(compat_restore_all_guest)
         ASSERT_INTERRUPTS_DISABLED
 .Lcr4_orig:
-        ASM_NOP8 /* testb $3,UREGS_cs(%rsp) */
-        ASM_NOP2 /* jpe .Lcr4_alt_end */
-        ASM_NOP8 /* mov CPUINFO_cr4...(%rsp), %rax */
-        ASM_NOP6 /* and $..., %rax */
-        ASM_NOP8 /* mov %rax, CPUINFO_cr4...(%rsp) */
-        ASM_NOP3 /* mov %rax, %cr4 */
+        .skip (.Lcr4_alt_end - .Lcr4_alt) - (. - .Lcr4_orig), 0x90
 .Lcr4_orig_end:
         .pushsection .altinstr_replacement, "ax"
 .Lcr4_alt:
@@ -192,6 +187,7 @@ ENTRY(compat_restore_all_guest)
         mov %rax, %cr4
 .Lcr4_alt_end:
         .section .altinstructions, "a"
+        altinstruction_entry .Lcr4_orig, .Lcr4_orig, X86_FEATURE_ALWAYS, 12, 0
         altinstruction_entry .Lcr4_orig, .Lcr4_alt, X86_FEATURE_SMEP, \
                              (.Lcr4_orig_end - .Lcr4_orig), \
                              (.Lcr4_alt_end - .Lcr4_alt)
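Each altinstruction_entry above emits a record into the .altinstructions section describing the original site, the replacement, the feature bit to key on, and the two lengths. The following is a hedged C sketch of such a record; the field names and widths are assumptions for illustration, with the authoritative definition being struct alt_instr in xen/include/asm-x86/alternative.h. The new X86_FEATURE_ALWAYS entry has a replacement length of 0, so applying it only rewrites the placeholder with optimal NOPs.

#include <stdint.h>
#include <stdio.h>

/* Assumed layout, for illustration only. */
struct alt_instr {
    int32_t  orig_offset;   /* original instruction, relative offset   */
    int32_t  repl_offset;   /* replacement instructions, likewise      */
    uint16_t cpuid;         /* feature bit the replacement is keyed on */
    uint8_t  orig_len;      /* length of the placeholder               */
    uint8_t  repl_len;      /* length of the replacement (0 = NOPs)    */
};

#define X86_FEATURE_LM     (1 * 32 + 29)  /* always set on 64-bit CPUs */
#define X86_FEATURE_ALWAYS X86_FEATURE_LM

int main(void)
{
    /* Shape of the new entry: original == replacement and repl_len == 0,
     * so applying it just refills the placeholder with optimal NOPs. */
    struct alt_instr always = {
        .orig_offset = 0,           /* .Lcr4_orig, as a relative offset */
        .repl_offset = 0,           /* points back at the same site     */
        .cpuid       = X86_FEATURE_ALWAYS,
        .orig_len    = 12,
        .repl_len    = 0,
    };

    printf("record: %zu bytes, feature %u, orig_len %u, repl_len %u\n",
           sizeof(always), (unsigned)always.cpuid,
           (unsigned)always.orig_len, (unsigned)always.repl_len);
    return 0;
}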
diff --git a/xen/include/asm-x86/asm_defns.h b/xen/include/asm-x86/asm_defns.h
index 297bfdb..963e6ea 100644
--- a/xen/include/asm-x86/asm_defns.h
+++ b/xen/include/asm-x86/asm_defns.h
@@ -204,6 +204,7 @@ void ret_from_intr(void);
         662: __ASM_##op; \
         .popsection; \
         .pushsection .altinstructions, "a"; \
+        altinstruction_entry 661b, 661b, X86_FEATURE_ALWAYS, 3, 0; \
         altinstruction_entry 661b, 662b, X86_FEATURE_SMAP, 3, 3; \
         .popsection
 
@@ -215,6 +216,7 @@ void ret_from_intr(void);
         .pushsection .altinstr_replacement, "ax"; \
         668: call cr4_pv32_restore; \
         .section .altinstructions, "a"; \
+        altinstruction_entry 667b, 667b, X86_FEATURE_ALWAYS, 5, 0; \
         altinstruction_entry 667b, 668b, X86_FEATURE_SMEP, 5, 5; \
         altinstruction_entry 667b, 668b, X86_FEATURE_SMAP, 5, 5; \
         .popsection
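The ordering of the entries matters here: assuming Linux-style apply semantics (each entry whose feature is present has its replacement copied over the site and the remainder padded with NOPs), the X86_FEATURE_ALWAYS entry listed first unconditionally replaces the pre-populated NOPs with optimal ones, and a later SMEP/SMAP entry then overwrites the site with the real instructions when the feature is available. A small standalone sketch of that flow, with illustrative names, for the 3-byte ASM_STAC/ASM_CLAC case:

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

static const unsigned char clac[] = { 0x0f, 0x01, 0xca };  /* real CLAC */

/* Apply one entry: if the keyed feature is present, copy the replacement
 * over the site and pad the remainder with NOPs (single-byte here; the
 * real patcher would use optimal multi-byte NOPs, as sketched earlier). */
static void apply(unsigned char *site, const unsigned char *repl,
                  unsigned int orig_len, unsigned int repl_len,
                  bool have_feature)
{
    if ( !have_feature )
        return;                        /* feature absent: leave site alone */
    if ( repl_len )
        memcpy(site, repl, repl_len);
    memset(site + repl_len, 0x90, orig_len - repl_len);
}

int main(void)
{
    unsigned char site[3] = { 0x90, 0x90, 0x90 }; /* pre-populated ASM_NOP3 */
    bool cpu_has_smap = false;                    /* pretend probed at boot */
    unsigned int i;

    /* Entries are processed in order, as in the .altinstructions section. */
    apply(site, NULL, 3, 0, true);           /* X86_FEATURE_ALWAYS, 3, 0 */
    apply(site, clac, 3, 3, cpu_has_smap);   /* X86_FEATURE_SMAP,   3, 3 */

    for ( i = 0; i < sizeof(site); i++ )
        printf("%02x ", site[i]);
    printf("\n");
    return 0;
}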
diff --git a/xen/include/asm-x86/cpufeature.h b/xen/include/asm-x86/cpufeature.h
index 97c7e9e..9c49206 100644
--- a/xen/include/asm-x86/cpufeature.h
+++ b/xen/include/asm-x86/cpufeature.h
@@ -30,6 +30,9 @@
 #define cpufeat_bit(idx) ((idx) % 32)
 #define cpufeat_mask(idx) (_AC(1, U) << cpufeat_bit(idx))
 
+/* An alias of a feature we know is always going to be present. */
+#define X86_FEATURE_ALWAYS X86_FEATURE_LM
+
 #if !defined(__ASSEMBLY__) && !defined(X86_FEATURES_ONLY)
 #include <xen/bitops.h>
 
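X86_FEATURE_ALWAYS is simply an alias for a feature bit guaranteed to be set on any CPU the 64-bit hypervisor can run on (long mode), so the existing cpufeat_bit()/cpufeat_mask() plumbing and altinstruction_entry work unchanged. Below is a standalone sketch of that word/bit/mask arithmetic; the cpufeat_word() helper, the feature value, and the use of 1u in place of _AC(1, U) are simplifications for the example.

#include <stdint.h>
#include <stdio.h>

/* cpufeat_word() and the feature value are assumptions for this sketch. */
#define cpufeat_word(idx)  ((idx) / 32)
#define cpufeat_bit(idx)   ((idx) % 32)
#define cpufeat_mask(idx)  (1u << cpufeat_bit(idx))

#define X86_FEATURE_LM     (1 * 32 + 29)   /* long mode: word 1, bit 29 */
#define X86_FEATURE_ALWAYS X86_FEATURE_LM

int main(void)
{
    uint32_t caps[8] = { 0 };  /* stand-in for the boot CPU's feature words */

    /* Pretend CPUID reported long mode, as it must on any CPU running Xen. */
    caps[cpufeat_word(X86_FEATURE_LM)] |= cpufeat_mask(X86_FEATURE_LM);

    printf("ALWAYS -> word %d, bit %d, present: %s\n",
           cpufeat_word(X86_FEATURE_ALWAYS),
           cpufeat_bit(X86_FEATURE_ALWAYS),
           (caps[cpufeat_word(X86_FEATURE_ALWAYS)] &
            cpufeat_mask(X86_FEATURE_ALWAYS)) ? "yes" : "no");
    return 0;
}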
--
generated by git-patchbot for /home/xen/git/xen.git#master
_______________________________________________
Xen-changelog mailing list
Xen-changelog@xxxxxxxxxxxxx
http://lists.xensource.com/xen-changelog