[Xen-devel] [PATCH 7/7] x86/asm: Clobber %r{8..15} on exit to 32bit PV guests
In the presence of bugs such as XSA-214 where a 32bit PV guest can get its
hands on a long mode segment, this change prevents register content leaking
between domains.

Signed-off-by: Andrew Cooper <andrew.cooper3@xxxxxxxxxx>
---
CC: Jan Beulich <JBeulich@xxxxxxxx>
---
 xen/include/asm-x86/asm_defns.h | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/xen/include/asm-x86/asm_defns.h b/xen/include/asm-x86/asm_defns.h
index 11306d1..4b5891f 100644
--- a/xen/include/asm-x86/asm_defns.h
+++ b/xen/include/asm-x86/asm_defns.h
@@ -325,7 +325,8 @@ static always_inline void stac(void)
  *
  * @adj: extra stack pointer adjustment to be folded into the adjustment done
  *       anyway at the end of the macro
- * @compat: R8-R15 don't need reloading
+ * @compat: R8-R15 don't need reloading, but they are clobbered for added
+ *          safety against information leaks.
  */
 .macro RESTORE_ALL adj=0 compat=0
 .if !\compat
@@ -366,6 +367,16 @@ static always_inline void stac(void)
         LOAD_ONE_REG(bp, \compat)
         LOAD_ONE_REG(bx, \compat)
         subq  $-(UREGS_error_code-UREGS_r15+\adj), %rsp
+.if \compat
+        xor   %r8d, %r8d
+        xor   %r9d, %r9d
+        xor   %r10d, %r10d
+        xor   %r11d, %r11d
+        xor   %r12d, %r12d
+        xor   %r13d, %r13d
+        xor   %r14d, %r14d
+        xor   %r15d, %r15d
+.endif
 .endm

 #endif
--
2.1.4
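A note on the 32-bit xor forms used above (this explanation and the standalone
snippet below are illustrative additions, not part of the patch): on x86-64,
any write to a 32-bit register zero-extends into the full 64-bit register, so
"xor %r8d, %r8d" clears all of %r8 while taking a shorter encoding than the
64-bit "xor %r8, %r8". A minimal userspace sketch that demonstrates the
zero-extension, assuming a GCC-style toolchain with inline assembly:

/* Illustrative only: show that a 32-bit xor clears the whole 64-bit %r8. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint64_t r8_after;

    asm volatile ("movabs $0xdeadbeefcafef00d, %%r8\n\t"
                  "xor    %%r8d, %%r8d\n\t"   /* 32-bit write ...         */
                  "mov    %%r8, %0"           /* ... zero-extends to 64.  */
                  : "=r" (r8_after) : : "r8");

    /* Prints 0: the upper 32 bits were cleared by the 32-bit xor. */
    printf("%%r8 after 32-bit xor: %#llx\n", (unsigned long long)r8_after);
    return 0;
}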