[PATCH 2/2] x86/entry: Delete RESTORE_ALL



There is only a single user now, and it's the one odd case.  Inline it,
simplifying it down to just the compat case.

No functional change.

Signed-off-by: Andrew Cooper <andrew.cooper3@xxxxxxxxxx>
---
CC: Jan Beulich <JBeulich@xxxxxxxx>
CC: Roger Pau Monné <roger.pau@xxxxxxxxxx>
CC: Wei Liu <wl@xxxxxxx>

In principle we want to delay the %rsp adjustment until after VERW, but that
turns disp8's into disp32's, making the overall code size larger.  This path
is only for 32bit PV guests anyway, which are well on their way to being
fully obsolete.
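
For reference, the size cost is the ModRM displacement encoding: a
%rsp-relative operand whose offset fits in a signed byte uses a 1-byte disp8,
while anything larger needs a 4-byte disp32, i.e. +3 bytes per affected
access.  A minimal sketch with made-up offsets (not the real UREGS_* values):

        mov   0x70(%rsp), %eax    /* disp8:  8b 44 24 70           (4 bytes) */
        mov   0x98(%rsp), %eax    /* disp32: 8b 84 24 98 00 00 00  (7 bytes) */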
---
 xen/arch/x86/include/asm/asm_defns.h | 48 ----------------------------
 xen/arch/x86/x86_64/compat/entry.S   | 18 ++++++++++-
 2 files changed, 17 insertions(+), 49 deletions(-)

diff --git a/xen/arch/x86/include/asm/asm_defns.h b/xen/arch/x86/include/asm/asm_defns.h
index ec10a8e1dfc6..524ed05d87f6 100644
--- a/xen/arch/x86/include/asm/asm_defns.h
+++ b/xen/arch/x86/include/asm/asm_defns.h
@@ -266,54 +266,6 @@ static always_inline void stac(void)
         xor   %r15d, %r15d
 .endm
 
-#define LOAD_ONE_REG(reg, compat) \
-.if !(compat); \
-        movq  UREGS_r##reg(%rsp),%r##reg; \
-.else; \
-        movl  UREGS_r##reg(%rsp),%e##reg; \
-.endif
-
-/*
- * Restore all previously saved registers.
- *
- * @adj: extra stack pointer adjustment to be folded into the adjustment done
- *       anyway at the end of the macro
- * @compat: R8-R15 don't need reloading, but they are clobbered for added
- *          safety against information leaks.
- */
-.macro RESTORE_ALL adj=0, compat=0
-.if !\compat
-        movq  UREGS_r15(%rsp), %r15
-        movq  UREGS_r14(%rsp), %r14
-        movq  UREGS_r13(%rsp), %r13
-        movq  UREGS_r12(%rsp), %r12
-.else
-        xor %r15d, %r15d
-        xor %r14d, %r14d
-        xor %r13d, %r13d
-        xor %r12d, %r12d
-.endif
-        LOAD_ONE_REG(bp, \compat)
-        LOAD_ONE_REG(bx, \compat)
-.if !\compat
-        movq  UREGS_r11(%rsp),%r11
-        movq  UREGS_r10(%rsp),%r10
-        movq  UREGS_r9(%rsp),%r9
-        movq  UREGS_r8(%rsp),%r8
-.else
-        xor %r11d, %r11d
-        xor %r10d, %r10d
-        xor %r9d, %r9d
-        xor %r8d, %r8d
-.endif
-        LOAD_ONE_REG(ax, \compat)
-        LOAD_ONE_REG(cx, \compat)
-        LOAD_ONE_REG(dx, \compat)
-        LOAD_ONE_REG(si, \compat)
-        LOAD_ONE_REG(di, \compat)
-        subq  $-(UREGS_error_code-UREGS_r15+\adj), %rsp
-.endm
-
 /*
  * POP GPRs from a UREGS_* frame on the stack.  Does not modify flags.
  *
diff --git a/xen/arch/x86/x86_64/compat/entry.S b/xen/arch/x86/x86_64/compat/entry.S
index 631f4f272ac3..99d6dec296c6 100644
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -158,7 +158,23 @@ FUNC(compat_restore_all_guest)
         /* WARNING! `ret`, `call *`, `jmp *` not safe beyond this point. */
        SPEC_CTRL_EXIT_TO_PV    /* Req: a=spec_ctrl %rsp=regs/cpuinfo, Clob: cd */
 
-        RESTORE_ALL adj=8, compat=1
+        /* Opencoded POP_GPRS, restoring only the 32bit registers. */
+        xor   %r15d, %r15d
+        xor   %r14d, %r14d
+        xor   %r13d, %r13d
+        xor   %r12d, %r12d
+        mov   UREGS_rbp(%rsp), %ebp
+        mov   UREGS_rbx(%rsp), %ebx
+        xor   %r11d, %r11d
+        xor   %r10d, %r10d
+        xor   %r9d,  %r9d
+        xor   %r8d,  %r8d
+        mov   UREGS_rax(%rsp), %eax
+        mov   UREGS_rcx(%rsp), %ecx
+        mov   UREGS_rdx(%rsp), %edx
+        mov   UREGS_rsi(%rsp), %esi
+        mov   UREGS_rdi(%rsp), %edi
+        sub   $-(UREGS_rip-UREGS_r15), %rsp
 
         /* Account for ev/ec having already been popped off the stack. */
         SPEC_CTRL_COND_VERW \
-- 
2.30.2