x86emul: minor cleanup

Drop a redundant input constraint, correct a comment, and (re)move
fic.insn_bytes adjustments (these aren't needed for custom stub
invocations when the instruction placed in the stub can't raise #XF)
plus a corresponding check_xmm_exn() invocation.

Signed-off-by: Jan Beulich <jbeulich@suse.com>

--- a/xen/arch/x86/x86_emulate/x86_emulate.c
+++ b/xen/arch/x86/x86_emulate/x86_emulate.c
@@ -5681,8 +5681,7 @@ x86_emulate(
                     [eflags] "+g" (_regs.eflags),
                     [tmp] "=&r" (dummy), "+m" (*mmvalp),
                     "+m" (fic.exn_raised)
-                    : [func] "rm" (stub.func), "a" (mmvalp),
-                      [mask] "i" (EFLAGS_MASK));
+                    : "a" (mmvalp), [mask] "i" (EFLAGS_MASK));
         put_stub(stub);
         check_xmm_exn(&fic);
 
@@ -6086,7 +6085,7 @@ x86_emulate(
     case X86EMUL_OPC_F3(0x0f, 0x6f):     /* movdqu xmm/m128,xmm */
     case X86EMUL_OPC_VEX_F3(0x0f, 0x6f): /* vmovdqu {x,y}mm/mem,{x,y}mm */
     case X86EMUL_OPC_66(0x0f, 0x7f):     /* movdqa xmm,xmm/m128 */
-    case X86EMUL_OPC_VEX_66(0x0f, 0x7f): /* vmovdqa {x,y}mm,{x,y}mm/m128 */
+    case X86EMUL_OPC_VEX_66(0x0f, 0x7f): /* vmovdqa {x,y}mm,{x,y}mm/mem */
     case X86EMUL_OPC_F3(0x0f, 0x7f):     /* movdqu xmm,xmm/m128 */
     case X86EMUL_OPC_VEX_F3(0x0f, 0x7f): /* vmovdqu {x,y}mm,{x,y}mm/mem */
     movdqa:
@@ -7022,7 +7021,6 @@ x86_emulate(
         if ( !mode_64bit() )
             vex.w = 0;
         opc[1] = modrm & 0xc7;
-        fic.insn_bytes = PFX_BYTES + 2;
         opc[2] = 0xc3;
 
         copy_REX_VEX(opc, rex_prefix, vex);
@@ -7035,6 +7033,7 @@ x86_emulate(
             opc = init_prefixes(stub);
             opc[0] = b;
             opc[1] = modrm;
+            fic.insn_bytes = PFX_BYTES + 2;
             /* Restore high bit of XMM destination. */
             if ( sfence )
             {
@@ -7469,20 +7468,16 @@ x86_emulate(
             vex.w = 0;
         opc[1] = modrm & 0x38;
         opc[2] = imm1;
-        fic.insn_bytes = PFX_BYTES + 3;
         opc[3] = 0xc3;
         if ( vex.opcx == vex_none )
         {
             /* Cover for extra prefix byte. */
             --opc;
-            ++fic.insn_bytes;
         }
 
         copy_REX_VEX(opc, rex_prefix, vex);
         invoke_stub("", "", "=m" (dst.val) : "a" (&dst.val));
-
         put_stub(stub);
-        check_xmm_exn(&fic);
 
         ASSERT(!state->simd_size);
         dst.bytes = dst.type == OP_REG || b == 0x17 ? 4 : 1 << (b & 3);
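
Note for readers less familiar with the stub machinery: the [func]
constraint dropped in the first hunk was redundant because invoke_stub()
already hands the stub entry point to the asm() it wraps. A trimmed
sketch of the macro's shape (paraphrased, not the verbatim definition
from x86_emulate.c) looks like:

/*
 * Sketch only: the real macro carries further constraints (e.g. a
 * memory input telling the compiler the stub buffer is read), which
 * are omitted here for brevity.
 */
#define invoke_stub(pre, post, constraints...)                        \
    asm volatile ( pre "\n\tcall *%[stub]\n\t" post                   \
                   : constraints, [stub] "rm" (stub.func) )

Since [stub] "rm" (stub.func) is supplied by the macro itself, repeating
the same input as [func] "rm" (stub.func) at the call site merely
duplicated information the compiler already had.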