
[Xen-devel] [PATCH v3 5/9] x86: Temporarily disable SMAP to legally access user pages in kernel mode



Use STAC/CLAC to temporarily disable SMAP around the code paths that
legitimately access user pages while running in kernel mode: the guest
copy helpers in usercopy.c, __put_user_asm()/__get_user_asm(),
__cmpxchg_user(), and the bounce-frame creation code in entry.S.
asm_domain_crash_synchronous() issues clac() explicitly, since it can be
reached from the bounce-frame code while SMAP is still disabled.

Signed-off-by: Feng Wu <feng.wu@xxxxxxxxx>
---
 xen/arch/x86/traps.c                | 2 ++
 xen/arch/x86/usercopy.c             | 6 ++++++
 xen/arch/x86/x86_64/compat/entry.S  | 2 ++
 xen/arch/x86/x86_64/entry.S         | 2 ++
 xen/include/asm-x86/uaccess.h       | 4 ++++
 xen/include/asm-x86/x86_64/system.h | 2 ++
 6 files changed, 18 insertions(+)
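
For reference, a minimal sketch of the C-level helpers this patch relies on,
assuming the stac()/clac() and ASM_STAC/ASM_CLAC definitions introduced
earlier in this series look roughly like the following (the feature test and
any instruction patching used in the actual tree may differ; this is
illustrative only and not part of the patch):

#include <asm/cpufeature.h>   /* boot_cpu_has(), X86_FEATURE_SMAP (assumed) */

static inline void stac(void)
{
    /* Set EFLAGS.AC so that supervisor-mode accesses to user pages
     * bypass SMAP checks until the matching clac(). */
    if ( boot_cpu_has(X86_FEATURE_SMAP) )
        asm volatile ( "stac" : : : "memory" );
}

static inline void clac(void)
{
    /* Clear EFLAGS.AC, re-enabling SMAP enforcement. */
    if ( boot_cpu_has(X86_FEATURE_SMAP) )
        asm volatile ( "clac" : : : "memory" );
}

The assembly-level ASM_STAC/ASM_CLAC used below in entry.S and in the inline
asm blocks presumably expand to the same stac/clac instructions (or to
nothing on hardware without SMAP), so each STAC is paired with a CLAC on
both the normal and the fault-fixup exit paths; the entry.S bounce-frame
fixups reach the explicit clac() added to asm_domain_crash_synchronous().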

diff --git a/xen/arch/x86/traps.c b/xen/arch/x86/traps.c
index ed4ae2d..1b0dc1a 100644
--- a/xen/arch/x86/traps.c
+++ b/xen/arch/x86/traps.c
@@ -3754,6 +3754,8 @@ unsigned long do_get_debugreg(int reg)
 
 void asm_domain_crash_synchronous(unsigned long addr)
 {
+    clac();
+
     if ( addr == 0 )
         addr = this_cpu(last_extable_addr);
 
diff --git a/xen/arch/x86/usercopy.c b/xen/arch/x86/usercopy.c
index b79202b..af96bf4 100644
--- a/xen/arch/x86/usercopy.c
+++ b/xen/arch/x86/usercopy.c
@@ -15,6 +15,7 @@ unsigned long __copy_to_user_ll(void __user *to, const void *from, unsigned n)
     unsigned long __d0, __d1, __d2, __n = n;
 
     asm volatile (
+        ASM_STAC"\n"
         "    cmp  $"STR(2*BYTES_PER_LONG-1)",%0\n"
         "    jbe  1f\n"
         "    mov  %1,%0\n"
@@ -30,6 +31,7 @@ unsigned long __copy_to_user_ll(void __user *to, const void *from, unsigned n)
         "    mov  %3,%0\n"
         "1:  rep movsb\n" /* ...remainder copied as bytes */
         "2:\n"
+        ASM_CLAC"\n"
         ".section .fixup,\"ax\"\n"
         "5:  add %3,%0\n"
         "    jmp 2b\n"
@@ -52,6 +54,7 @@ __copy_from_user_ll(void *to, const void __user *from, unsigned n)
     unsigned long __d0, __d1, __d2, __n = n;
 
     asm volatile (
+        ASM_STAC"\n"
         "    cmp  $"STR(2*BYTES_PER_LONG-1)",%0\n"
         "    jbe  1f\n"
         "    mov  %1,%0\n"
@@ -67,6 +70,7 @@ __copy_from_user_ll(void *to, const void __user *from, unsigned n)
         "    mov  %3,%0\n"
         "1:  rep; movsb\n" /* ...remainder copied as bytes */
         "2:\n"
+        ASM_CLAC"\n"
         ".section .fixup,\"ax\"\n"
         "5:  add %3,%0\n"
         "    jmp 6f\n"
@@ -114,10 +118,12 @@ copy_to_user(void __user *to, const void *from, unsigned n)
 do {                                                                   \
        long __d0;                                                      \
        __asm__ __volatile__(                                           \
+               ASM_STAC"\n"                                            \
                "0:     rep; stosl\n"                                   \
                "       movl %2,%0\n"                                   \
                "1:     rep; stosb\n"                                   \
                "2:\n"                                                  \
+               ASM_CLAC"\n"                                            \
                ".section .fixup,\"ax\"\n"                              \
                "3:     lea 0(%2,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
diff --git a/xen/arch/x86/x86_64/compat/entry.S b/xen/arch/x86/x86_64/compat/entry.S
index ac594c9..298c1a9 100644
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -266,6 +266,7 @@ ENTRY(compat_int80_direct_trap)
 /* On return only %rbx and %rdx are guaranteed non-clobbered.            */
 compat_create_bounce_frame:
         ASSERT_INTERRUPTS_ENABLED
+        ASM_STAC
         mov   %fs,%edi
         testb $2,UREGS_cs+8(%rsp)
         jz    1f
@@ -337,6 +338,7 @@ __UNLIKELY_END(compat_bounce_null_selector)
         movl  %eax,UREGS_cs+8(%rsp)
         movl  TRAPBOUNCE_eip(%rdx),%eax
         movl  %eax,UREGS_rip+8(%rsp)
+        ASM_CLAC
         ret
 .section .fixup,"ax"
 .Lfx13:
diff --git a/xen/arch/x86/x86_64/entry.S b/xen/arch/x86/x86_64/entry.S
index 5f46803..c30d360 100644
--- a/xen/arch/x86/x86_64/entry.S
+++ b/xen/arch/x86/x86_64/entry.S
@@ -379,6 +379,7 @@ __UNLIKELY_END(create_bounce_frame_bad_sp)
         movb  TRAPBOUNCE_flags(%rdx),%cl
         subq  $40,%rsi
         movq  UREGS_ss+8(%rsp),%rax
+        ASM_STAC
 .Lft2:  movq  %rax,32(%rsi)             # SS
         movq  UREGS_rsp+8(%rsp),%rax
 .Lft3:  movq  %rax,24(%rsi)             # RSP
@@ -439,6 +440,7 @@ UNLIKELY_START(z, create_bounce_frame_bad_bounce_ip)
         jmp   asm_domain_crash_synchronous  /* Does not return */
 __UNLIKELY_END(create_bounce_frame_bad_bounce_ip)
         movq  %rax,UREGS_rip+8(%rsp)
+        ASM_CLAC
         ret
         _ASM_EXTABLE(.Lft2,  dom_crash_sync_extable)
         _ASM_EXTABLE(.Lft3,  dom_crash_sync_extable)
diff --git a/xen/include/asm-x86/uaccess.h b/xen/include/asm-x86/uaccess.h
index 88b4ba2..ce1af4a 100644
--- a/xen/include/asm-x86/uaccess.h
+++ b/xen/include/asm-x86/uaccess.h
@@ -147,8 +147,10 @@ struct __large_struct { unsigned long buf[100]; };
  */
 #define __put_user_asm(x, addr, err, itype, rtype, ltype, errret)      \
        __asm__ __volatile__(                                           \
+               ASM_STAC"\n"                                            \
                "1:     mov"itype" %"rtype"1,%2\n"                      \
                "2:\n"                                                  \
+               ASM_CLAC"\n"                                            \
                ".section .fixup,\"ax\"\n"                              \
                "3:     mov %3,%0\n"                                    \
                "       jmp 2b\n"                                       \
@@ -159,8 +161,10 @@ struct __large_struct { unsigned long buf[100]; };
 
 #define __get_user_asm(x, addr, err, itype, rtype, ltype, errret)      \
        __asm__ __volatile__(                                           \
+               ASM_STAC"\n"                                            \
                "1:     mov"itype" %2,%"rtype"1\n"                      \
                "2:\n"                                                  \
+               ASM_CLAC"\n"                                            \
                ".section .fixup,\"ax\"\n"                              \
                "3:     mov %3,%0\n"                                    \
                "       xor"itype" %"rtype"1,%"rtype"1\n"               \
diff --git a/xen/include/asm-x86/x86_64/system.h b/xen/include/asm-x86/x86_64/system.h
index 20f038b..b8394b9 100644
--- a/xen/include/asm-x86/x86_64/system.h
+++ b/xen/include/asm-x86/x86_64/system.h
@@ -13,8 +13,10 @@
  */
 #define __cmpxchg_user(_p,_o,_n,_isuff,_oppre,_regtype)                 \
     asm volatile (                                                      \
+        ASM_STAC"\n"                                                    \
         "1: lock; cmpxchg"_isuff" %"_oppre"2,%3\n"                      \
         "2:\n"                                                          \
+        ASM_CLAC"\n"                                                    \
         ".section .fixup,\"ax\"\n"                                      \
         "3:     movl $1,%1\n"                                           \
         "       jmp 2b\n"                                               \
-- 
1.8.3.1

