Re: [Xen-devel] [PATCH 02/17] x86emul: re-order cases of main switch statement

2017-09-13 Thread George Dunlap
On Wed, Jun 21, 2017 at 12:59 PM, Jan Beulich  wrote:
> Restore the intended numerical ordering, which has become "violated",
> mostly by incremental additions where moving around bigger chunks did
> not seem advisable. One exception, though, at the very top of the
> switch(): keeping the arithmetic ops together seems preferable to
> entirely strict ordering.

+1

> Additionally move a few macro definitions before their first uses (the
> placement is benign as long as those uses are themselves only macro
> definitions, but that's going to change when those macros have helpers
> broken out).
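
The placement rule here is worth spelling out: a macro referenced only
inside another macro's body can be defined afterwards, because expansion
only happens where the outer macro is actually used; a broken-out static
helper has no such luxury. A minimal standalone sketch (hypothetical
names, not from the patch):

    /* Benign: INNER is only referenced inside OUTER's *definition*,
     * and macro bodies aren't rescanned until OUTER itself is used,
     * by which point INNER is visible. */
    #define OUTER(x) INNER(x)
    #define INNER(x) ((x) + 1)

    /* Not benign once the body becomes a real function: the helper
     * must be declared before any code that expands a macro calling
     * it, hence moving the definitions up front. */
    static inline int inner_fn(int x) { return x + 1; }
    #define OUTER2(x) inner_fn(x)

    int use_them(int v)
    {
        return OUTER(v) + OUTER2(v); /* both expand here */
    }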
>
> No (intended) functional change.
>
> Signed-off-by: Jan Beulich 

As far as I can tell, the 'no functional change' claim holds:

Reviewed-by: George Dunlap 

>
> --- a/xen/arch/x86/x86_emulate/x86_emulate.c
> +++ b/xen/arch/x86/x86_emulate/x86_emulate.c
> @@ -843,6 +843,27 @@ do{ asm volatile (
>  #define __emulate_1op_8byte(_op, _dst, _eflags)
>  #endif /* __i386__ */
>
> +#define fail_if(p)  \
> +do {\
> +rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
> +if ( rc ) goto done;\
> +} while (0)
> +
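
fail_if() reads slightly inverted at first sight: the predicate names the
condition under which emulation must bail with X86EMUL_UNHANDLEABLE. A
typical use, guarding a callback before it is needed (hypothetical line,
but matching the file's style):

    fail_if(ops->write == NULL); /* can't emulate without a write hook */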
> +static inline int mkec(uint8_t e, int32_t ec, ...)
> +{
> +return (e < 32 && ((1u << e) & EXC_HAS_EC)) ? ec : X86_EVENT_NO_EC;
> +}
> +
> +#define generate_exception_if(p, e, ec...)\
> +({  if ( (p) ) {  \
> +x86_emul_hw_exception(e, mkec(e, ##ec, 0), ctxt); \
> +rc = X86EMUL_EXCEPTION;   \
> +goto done;\
> +} \
> +})
> +
> +#define generate_exception(e, ec...) generate_exception_if(true, e, ##ec)
> +
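
A side note on the exception macros, since the ##ec idiom is easy to
misread: the trailing ", 0" in mkec(e, ##ec, 0) supplies a dummy error
code when the caller passes none, while mkec()'s variadic tail absorbs
the dummy when a real code *is* passed. A simplified standalone sketch
(the EXC_HAS_EC value below is illustrative, covering the vectors that
architecturally push an error code):

    #include <stdio.h>

    #define X86_EVENT_NO_EC (-1)
    /* #DF, #TS, #NP, #SS, #GP, #PF, #AC push an error code. */
    #define EXC_HAS_EC \
        ((1u << 8) | (1u << 10) | (1u << 11) | (1u << 12) | \
         (1u << 13) | (1u << 14) | (1u << 17))

    static inline int mkec(unsigned char e, int ec, ...)
    {
        return (e < 32 && ((1u << e) & EXC_HAS_EC)) ? ec : X86_EVENT_NO_EC;
    }

    /* GNU named-variadic macro, as in the hypervisor sources. */
    #define demo_ec(e, ec...) mkec(e, ##ec, 0)

    int main(void)
    {
        printf("%d\n", demo_ec(6));     /* #UD: mkec(6, 0) -> -1 (no EC) */
        printf("%d\n", demo_ec(13, 4)); /* #GP: mkec(13, 4, 0) -> 4      */
        return 0;
    }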
>  #ifdef __XEN__
>  # define invoke_stub(pre, post, constraints...) do {\
>  union stub_exception_token res_ = { .raw = ~0 };\
> @@ -911,27 +932,6 @@ do{ asm volatile (
>  # define mode_64bit() false
>  #endif
>
> -#define fail_if(p)  \
> -do {\
> -rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
> -if ( rc ) goto done;\
> -} while (0)
> -
> -static inline int mkec(uint8_t e, int32_t ec, ...)
> -{
> -return (e < 32 && ((1u << e) & EXC_HAS_EC)) ? ec : X86_EVENT_NO_EC;
> -}
> -
> -#define generate_exception_if(p, e, ec...)\
> -({  if ( (p) ) {  \
> -x86_emul_hw_exception(e, mkec(e, ##ec, 0), ctxt); \
> -rc = X86EMUL_EXCEPTION;   \
> -goto done;\
> -} \
> -})
> -
> -#define generate_exception(e, ec...) generate_exception_if(true, e, ##ec)
> -
>  /*
>   * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
>   * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
> @@ -3596,6 +3596,11 @@ x86_emulate(
>  dst.bytes = 2;
>  break;
>
> +case 0x8d: /* lea */
> +generate_exception_if(ea.type != OP_MEM, EXC_UD);
> +dst.val = ea.mem.off;
> +break;
> +
>  case 0x8e: /* mov r/m,Sreg */
>  seg = modrm_reg & 7; /* REX.R is ignored. */
>  generate_exception_if(!is_x86_user_segment(seg) ||
> @@ -3607,11 +3612,6 @@ x86_emulate(
>  dst.type = OP_NONE;
>  break;
>
> -case 0x8d: /* lea */
> -generate_exception_if(ea.type != OP_MEM, EXC_UD);
> -dst.val = ea.mem.off;
> -break;
> -
>  case 0x8f: /* pop (sole member of Grp1a) */
>  generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
>  /* 64-bit mode: POP defaults to a 64-bit operand. */
> @@ -5746,12 +5746,6 @@ x86_emulate(
>  _regs.r(ax) = (uint32_t)msr_val;
>  break;
>
> -case X86EMUL_OPC(0x0f, 0x40) ... X86EMUL_OPC(0x0f, 0x4f): /* cmovcc */
> -vcpu_must_have(cmov);
> -if ( test_cc(b, _regs.eflags) )
> -dst.val = src.val;
> -break;
> -
>  case X86EMUL_OPC(0x0f, 0x34): /* sysenter */
>  vcpu_must_have(sep);
>  generate_exception_if(mode_ring0(), EXC_GP, 0);
> @@ -5834,6 +5828,12 @@ x86_emulate(
>  singlestep = _regs.eflags & X86_EFLAGS_TF;
>  break;
>
> +case X86EMUL_OPC(0x0f, 0x40) ... X86EMUL_OPC(0x0f, 0x4f): /* cmovcc */
> +vcpu_must_have(cmov);
> +if ( test_cc(b, _regs.eflags) )
> +dst.val = src.val;
> +break;
> +
>  
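
As an aside for readers following the cmovcc move: the decode that
test_cc() applies to the opcode's low nibble is the standard x86
condition encoding, where bits 3:1 pick the flag predicate and bit 0
inverts its sense (0x44 = CMOVZ, 0x45 = CMOVNZ). A standalone sketch,
not the file's actual helper:

    #include <stdbool.h>
    #include <stdint.h>

    #define X86_EFLAGS_CF 0x0001u
    #define X86_EFLAGS_PF 0x0004u
    #define X86_EFLAGS_ZF 0x0040u
    #define X86_EFLAGS_SF 0x0080u
    #define X86_EFLAGS_OF 0x0800u

    static bool cc_holds(uint8_t opc, uint32_t eflags)
    {
        bool r;

        switch ( (opc >> 1) & 7 )
        {
        case 0: r = eflags & X86_EFLAGS_OF; break;                   /* O   */
        case 1: r = eflags & X86_EFLAGS_CF; break;                   /* B/C */
        case 2: r = eflags & X86_EFLAGS_ZF; break;                   /* Z/E */
        case 3: r = eflags & (X86_EFLAGS_CF | X86_EFLAGS_ZF); break; /* BE  */
        case 4: r = eflags & X86_EFLAGS_SF; break;                   /* S   */
        case 5: r = eflags & X86_EFLAGS_PF; break;                   /* P   */
        case 6: /* L: SF != OF */
            r = !(eflags & X86_EFLAGS_SF) != !(eflags & X86_EFLAGS_OF);
            break;
        default: /* LE: ZF || SF != OF */
            r = (eflags & X86_EFLAGS_ZF) ||
                !(eflags & X86_EFLAGS_SF) != !(eflags & X86_EFLAGS_OF);
            break;
        }

        return r ^ (opc & 1); /* bit 0 flips the sense */
    }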

[Xen-devel] [PATCH 02/17] x86emul: re-order cases of main switch statement

2017-06-21 Thread Jan Beulich
Restore the intended numerical ordering, which has become "violated",
mostly by incremental additions where moving around bigger chunks did
not seem advisable. One exception, though, at the very top of the
switch(): keeping the arithmetic ops together seems preferable to
entirely strict ordering.

Additionally move a few macro definitions before their first uses (the
placement is benign as long as those uses are themselves only macro
definitions, but that's going to change when those macros have helpers
broken out).

No (intended) functional change.

Signed-off-by: Jan Beulich 

--- a/xen/arch/x86/x86_emulate/x86_emulate.c
+++ b/xen/arch/x86/x86_emulate/x86_emulate.c
@@ -843,6 +843,27 @@ do{ asm volatile (
 #define __emulate_1op_8byte(_op, _dst, _eflags)
 #endif /* __i386__ */
 
+#define fail_if(p)  \
+do {\
+rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
+if ( rc ) goto done;\
+} while (0)
+
+static inline int mkec(uint8_t e, int32_t ec, ...)
+{
+return (e < 32 && ((1u << e) & EXC_HAS_EC)) ? ec : X86_EVENT_NO_EC;
+}
+
+#define generate_exception_if(p, e, ec...)\
+({  if ( (p) ) {  \
+x86_emul_hw_exception(e, mkec(e, ##ec, 0), ctxt); \
+rc = X86EMUL_EXCEPTION;   \
+goto done;\
+} \
+})
+
+#define generate_exception(e, ec...) generate_exception_if(true, e, ##ec)
+
 #ifdef __XEN__
 # define invoke_stub(pre, post, constraints...) do {\
 union stub_exception_token res_ = { .raw = ~0 };\
@@ -911,27 +932,6 @@ do{ asm volatile (
 # define mode_64bit() false
 #endif
 
-#define fail_if(p)  \
-do {\
-rc = (p) ? X86EMUL_UNHANDLEABLE : X86EMUL_OKAY; \
-if ( rc ) goto done;\
-} while (0)
-
-static inline int mkec(uint8_t e, int32_t ec, ...)
-{
-return (e < 32 && ((1u << e) & EXC_HAS_EC)) ? ec : X86_EVENT_NO_EC;
-}
-
-#define generate_exception_if(p, e, ec...)\
-({  if ( (p) ) {  \
-x86_emul_hw_exception(e, mkec(e, ##ec, 0), ctxt); \
-rc = X86EMUL_EXCEPTION;   \
-goto done;\
-} \
-})
-
-#define generate_exception(e, ec...) generate_exception_if(true, e, ##ec)
-
 /*
  * Given byte has even parity (even number of 1s)? SDM Vol. 1 Sec. 3.4.3.1,
  * "Status Flags": EFLAGS.PF reflects parity of least-sig. byte of result only.
@@ -3596,6 +3596,11 @@ x86_emulate(
 dst.bytes = 2;
 break;
 
+case 0x8d: /* lea */
+generate_exception_if(ea.type != OP_MEM, EXC_UD);
+dst.val = ea.mem.off;
+break;
+
 case 0x8e: /* mov r/m,Sreg */
 seg = modrm_reg & 7; /* REX.R is ignored. */
 generate_exception_if(!is_x86_user_segment(seg) ||
@@ -3607,11 +3612,6 @@ x86_emulate(
 dst.type = OP_NONE;
 break;
 
-case 0x8d: /* lea */
-generate_exception_if(ea.type != OP_MEM, EXC_UD);
-dst.val = ea.mem.off;
-break;
-
 case 0x8f: /* pop (sole member of Grp1a) */
 generate_exception_if((modrm_reg & 7) != 0, EXC_UD);
 /* 64-bit mode: POP defaults to a 64-bit operand. */
@@ -5746,12 +5746,6 @@ x86_emulate(
 _regs.r(ax) = (uint32_t)msr_val;
 break;
 
-case X86EMUL_OPC(0x0f, 0x40) ... X86EMUL_OPC(0x0f, 0x4f): /* cmovcc */
-vcpu_must_have(cmov);
-if ( test_cc(b, _regs.eflags) )
-dst.val = src.val;
-break;
-
 case X86EMUL_OPC(0x0f, 0x34): /* sysenter */
 vcpu_must_have(sep);
 generate_exception_if(mode_ring0(), EXC_GP, 0);
@@ -5834,6 +5828,12 @@ x86_emulate(
 singlestep = _regs.eflags & X86_EFLAGS_TF;
 break;
 
+case X86EMUL_OPC(0x0f, 0x40) ... X86EMUL_OPC(0x0f, 0x4f): /* cmovcc */
+vcpu_must_have(cmov);
+if ( test_cc(b, _regs.eflags) )
+dst.val = src.val;
+break;
+
 CASE_SIMD_PACKED_FP(, 0x0f, 0x50): /* movmskp{s,d} xmm,reg */
 CASE_SIMD_PACKED_FP(_VEX, 0x0f, 0x50): /* vmovmskp{s,d} {x,y}mm,reg */
 CASE_SIMD_PACKED_INT(0x0f, 0xd7):  /* pmovmskb {,x}mm,reg */
@@ -6050,10 +6050,6 @@ x86_emulate(
 get_fpu(X86EMUL_FPU_mmx, &fic);
 goto simd_0f_common;
 
-case X86EMUL_OPC_VEX_66(0x0f38, 0x41): /* vphminposuw xmm/m128,xmm,xmm */
-generate_exception_if(vex.l,