VMX: move various uses of UD2 out of fast paths

... at once making conditional forward jumps, which are statically
predicted to be not taken, only used for the unlikely (error) cases.

Signed-off-by: Jan Beulich
---
v2: Fix #UD recovery for INVVPID (unintended flow change spotted by
    Andrew Cooper).

--- a/xen/include/asm-x86/asm_defns.h
+++ b/xen/include/asm-x86/asm_defns.h
@@ -67,6 +67,30 @@ void ret_from_intr(void);
 #define ASSERT_NOT_IN_ATOMIC
 #endif
 
+#else
+
+#ifdef __clang__ /* clang's builtin assembler can't do .subsection */
+
+#define UNLIKELY_START_SECTION ".pushsection .fixup,\"ax\""
+#define UNLIKELY_END_SECTION   ".popsection"
+
+#else
+
+#define UNLIKELY_START_SECTION ".subsection 1"
+#define UNLIKELY_END_SECTION   ".subsection 0"
+
+#endif
+
+#define UNLIKELY_START(cond, tag)          \
+        "j" #cond " .Lunlikely%=.tag;\n\t" \
+        UNLIKELY_START_SECTION "\n"        \
+        ".Lunlikely%=.tag:"
+
+#define UNLIKELY_END(tag)                  \
+        "jmp .Llikely%=.tag;\n\t"          \
+        UNLIKELY_END_SECTION "\n"          \
+        ".Llikely%=.tag:"
+
 #endif
 
 #endif /* __X86_ASM_DEFNS_H__ */
--- a/xen/include/asm-x86/hvm/vmx/vmx.h
+++ b/xen/include/asm-x86/hvm/vmx/vmx.h
@@ -285,7 +285,9 @@ static inline void __vmptrld(u64 addr)
     asm volatile ( VMPTRLD_OPCODE
                    MODRM_EAX_06
                    /* CF==1 or ZF==1 --> crash (ud2) */
-                   "ja 1f ; ud2 ; 1:\n"
+                   UNLIKELY_START(be, vmptrld)
+                   "\tud2\n"
+                   UNLIKELY_END_SECTION
                    :
                    : "a" (&addr)
                    : "memory");
@@ -296,7 +298,9 @@ static inline void __vmpclear(u64 addr)
     asm volatile ( VMCLEAR_OPCODE
                    MODRM_EAX_06
                    /* CF==1 or ZF==1 --> crash (ud2) */
-                   "ja 1f ; ud2 ; 1:\n"
+                   UNLIKELY_START(be, vmclear)
+                   "\tud2\n"
+                   UNLIKELY_END_SECTION
                    :
                    : "a" (&addr)
                    : "memory");
@@ -309,7 +313,9 @@ static inline unsigned long __vmread(uns
     asm volatile ( VMREAD_OPCODE
                    MODRM_EAX_ECX
                    /* CF==1 or ZF==1 --> crash (ud2) */
-                   "ja 1f ; ud2 ; 1:\n"
+                   UNLIKELY_START(be, vmread)
+                   "\tud2\n"
+                   UNLIKELY_END_SECTION
                    : "=c" (ecx)
                    : "a" (field)
                    : "memory");
@@ -322,7 +328,9 @@ static inline void __vmwrite(unsigned lo
     asm volatile ( VMWRITE_OPCODE
                    MODRM_EAX_ECX
                    /* CF==1 or ZF==1 --> crash (ud2) */
-                   "ja 1f ; ud2 ; 1:\n"
+                   UNLIKELY_START(be, vmwrite)
+                   "\tud2\n"
+                   UNLIKELY_END_SECTION
                    :
                    : "a" (field) , "c" (value)
                    : "memory");
@@ -360,7 +368,9 @@ static inline void __invept(int type, u6
     asm volatile ( INVEPT_OPCODE
                    MODRM_EAX_08
                    /* CF==1 or ZF==1 --> crash (ud2) */
-                   "ja 1f ; ud2 ; 1:\n"
+                   UNLIKELY_START(be, invept)
+                   "\tud2\n"
+                   UNLIKELY_END_SECTION
                    :
                    : "a" (&operand), "c" (type)
                    : "memory" );
@@ -377,7 +387,10 @@ static inline void __invvpid(int type, u
     /* Fix up #UD exceptions which occur when TLBs are flushed before VMXON. */
     asm volatile ( "1: " INVVPID_OPCODE MODRM_EAX_08
                    /* CF==1 or ZF==1 --> crash (ud2) */
-                   "ja 2f ; ud2 ; 2:\n"
+                   UNLIKELY_START(be, invvpid)
+                   "\tud2\n"
+                   UNLIKELY_END_SECTION "\n"
+                   "2:"
                    _ASM_EXTABLE(1b, 2b)
                    :
                    : "a" (&operand), "c" (type)
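
[ Illustration only, not part of the patch: the VMX sites above pair
  UNLIKELY_START() with bare UNLIKELY_END_SECTION because ud2 never
  falls through, so no jump back is needed.  A hypothetical caller whose
  out-of-line slow path has to rejoin the fast path would use
  UNLIKELY_END() instead, as in the sketch below.  It assumes the
  UNLIKELY_* macros from asm_defns.h are visible; example_tick(),
  example_counter, EXAMPLE_PERIOD and the "refill" tag are made-up
  names. ]

    #define EXAMPLE_PERIOD 64

    static unsigned int example_counter = EXAMPLE_PERIOD;

    static inline void example_tick(void)
    {
        asm volatile ( "decl %[cnt]\n\t"
                       /* Forward jump, taken only when the counter hits zero. */
                       UNLIKELY_START(z, refill)
                       /* Out-of-line refill, emitted into .subsection 1
                        * (or the .fixup section with the clang variant). */
                       "\tmovl %[period], %[cnt]\n\t"
                       /* Jump back and rejoin the fast path. */
                       UNLIKELY_END(refill)
                       : [cnt] "+m" (example_counter)
                       : [period] "i" (EXAMPLE_PERIOD) );
    }

[ With gas, the %= escape gives each asm instance a unique label suffix,
  so the fast path keeps only the not-taken forward branch while the
  refill code is assembled out of line. ]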