[PATCH 2/2] x86: replace EXxmm_mdq by EXVexWdqScalar

Jan Beulich <jbeulich@suse.com>
Thu Jan 30 16:38:00 GMT 2020


There's no need to have two operand specifiers / enumerators serving
the same purpose. Replacing the uses of EXxmm_mdq by EXVexWdqScalar
then renders xmm_mdq_mode unused.
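
For reference, the overlap is in how the memory operand gets sized: both
enumerators describe a dword or qword access chosen solely by the
(E)VEX.W bit, which in OP_E_memory() comes down to shift = vex.w ? 3 : 2.
Below is a minimal, stand-alone sketch of that shared sizing; the enum
and helper are purely illustrative and not part of i386-dis.c:

#include <stdio.h>

/* Illustrative stand-ins for the disassembler's operand size modes.  */
enum scalar_mode { xmm_mdq_mode, vex_scalar_w_dq_mode };

/* Memory operand size in bytes, derived solely from the W bit.  Both
   modes take the same path, which is why one enumerator suffices.  */
static unsigned int
mem_size (enum scalar_mode mode, int vex_w)
{
  switch (mode)
    {
    case xmm_mdq_mode:
    case vex_scalar_w_dq_mode:
      return vex_w ? 8 : 4;	/* matches shift = vex.w ? 3 : 2 */
    }
  return 0;
}

int
main (void)
{
  for (int w = 0; w <= 1; w++)
    printf ("W=%d: xmm_mdq -> %u bytes, vex_scalar_w_dq -> %u bytes\n",
	    w, mem_size (xmm_mdq_mode, w),
	    mem_size (vex_scalar_w_dq_mode, w));
  return 0;
}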

opcodes/
2020-01-XX  Jan Beulich  <jbeulich@suse.com>

	* i386-dis.c (EXxmm_mdq, xmm_mdq_mode): Delete.
	(intel_operand_size): Drop xmm_mdq_mode case label.
	(OP_E_memory): Replace xmm_mdq_mode case label by
	vex_scalar_w_dq_mode one.
	(OP_EX): Drop xmm_mdq_mode check.
	* i386-dis-evex-prefix.h: Replace EXxmm_mdq by EXVexWdqScalar.

--- a/opcodes/i386-dis-evex-prefix.h
+++ b/opcodes/i386-dis-evex-prefix.h
@@ -907,7 +907,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vscalefs%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vscalefs%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
   },
   /* PREFIX_EVEX_0F3830 */
   {
@@ -1021,7 +1021,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vgetexps%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexS }, 0 },
+    { "vgetexps%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexS }, 0 },
   },
   /* PREFIX_EVEX_0F3844 */
   {
@@ -1057,7 +1057,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vrcp14s%XW",	{ XMScalar, VexScalar, EXxmm_mdq }, 0 },
+    { "vrcp14s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar }, 0 },
   },
   /* PREFIX_EVEX_0F384E */
   {
@@ -1069,7 +1069,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vrsqrt14s%XW",	{ XMScalar, VexScalar, EXxmm_mdq }, 0 },
+    { "vrsqrt14s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar }, 0 },
   },
   /* PREFIX_EVEX_0F3850 */
   {
@@ -1349,7 +1349,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfmadd132s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfmadd132s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
   },
   /* PREFIX_EVEX_0F389A */
   {
@@ -1362,7 +1362,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfmsub132s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfmsub132s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
     { "v4fmaddss",	{ XMScalar, VexScalar, Mxmm }, 0 },
   },
   /* PREFIX_EVEX_0F389C */
@@ -1375,7 +1375,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfnmadd132s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfnmadd132s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
   },
   /* PREFIX_EVEX_0F389E */
   {
@@ -1387,7 +1387,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfnmsub132s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfnmsub132s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
   },
   /* PREFIX_EVEX_0F38A0 */
   {
@@ -1435,7 +1435,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfmadd213s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfmadd213s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
   },
   /* PREFIX_EVEX_0F38AA */
   {
@@ -1448,7 +1448,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfmsub213s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfmsub213s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
     { "v4fnmaddss",	{ XMScalar, VexScalar, Mxmm }, 0 },
   },
   /* PREFIX_EVEX_0F38AC */
@@ -1461,7 +1461,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfnmadd213s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfnmadd213s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
   },
   /* PREFIX_EVEX_0F38AE */
   {
@@ -1473,7 +1473,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfnmsub213s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfnmsub213s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
   },
   /* PREFIX_EVEX_0F38B4 */
   {
@@ -1509,7 +1509,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfmadd231s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfmadd231s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
   },
   /* PREFIX_EVEX_0F38BA */
   {
@@ -1521,7 +1521,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfmsub231s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfmsub231s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
   },
   /* PREFIX_EVEX_0F38BC */
   {
@@ -1533,7 +1533,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfnmadd231s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfnmadd231s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
   },
   /* PREFIX_EVEX_0F38BE */
   {
@@ -1545,7 +1545,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfnmsub231s%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexR }, 0 },
+    { "vfnmsub231s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexR }, 0 },
   },
   /* PREFIX_EVEX_0F38C4 */
   {
@@ -1617,7 +1617,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vrcp28s%XW",       { XMScalar, VexScalar, EXxmm_mdq, EXxEVexS }, 0 },
+    { "vrcp28s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexS }, 0 },
   },
   /* PREFIX_EVEX_0F38CC */
   {
@@ -1629,7 +1629,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vrsqrt28s%XW",     { XMScalar, VexScalar, EXxmm_mdq, EXxEVexS }, 0 },
+    { "vrsqrt28s%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexS }, 0 },
   },
   /* PREFIX_EVEX_0F38CF */
   {
@@ -1827,7 +1827,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vgetmants%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexS, Ib }, 0 },
+    { "vgetmants%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexS, Ib }, 0 },
   },
   /* PREFIX_EVEX_0F3A38 */
   {
@@ -1905,7 +1905,7 @@
   {
     { Bad_Opcode },
     { Bad_Opcode },
-    { "vfixupimms%XW",	{ XMScalar, VexScalar, EXxmm_mdq, EXxEVexS, Ib }, 0 },
+    { "vfixupimms%XW",	{ XMScalar, VexScalar, EXVexWdqScalar, EXxEVexS, Ib }, 0 },
   },
   /* PREFIX_EVEX_0F3A56 */
   {
--- a/opcodes/i386-dis.c
+++ b/opcodes/i386-dis.c
@@ -401,7 +401,6 @@ fetch_data (struct disassemble_info *inf
 #define EXxmm_mw { OP_EX, xmm_mw_mode }
 #define EXxmm_md { OP_EX, xmm_md_mode }
 #define EXxmm_mq { OP_EX, xmm_mq_mode }
-#define EXxmm_mdq { OP_EX, xmm_mdq_mode }
 #define EXxmmdw { OP_EX, xmmdw_mode }
 #define EXxmmqd { OP_EX, xmmqd_mode }
 #define EXymmq { OP_EX, ymmq_mode }
@@ -537,9 +536,6 @@ enum
   xmm_md_mode,
   /* XMM register or quad word memory operand */
   xmm_mq_mode,
-  /* XMM register or double/quad word memory operand, depending on
-     VEX.W.  */
-  xmm_mdq_mode,
   /* 16-byte XMM, word, double word or quad word operand.  */
   xmmdw_mode,
   /* 16-byte XMM, double word, quad word operand or xmm word operand.  */
@@ -13771,7 +13767,6 @@ intel_operand_size (int bytemode, int si
     case o_mode:
       oappend ("OWORD PTR ");
       break;
-    case xmm_mdq_mode:
     case vex_scalar_w_dq_mode:
       if (!need_vex)
 	abort ();
@@ -14016,12 +14011,12 @@ OP_E_memory (int bytemode, int sizeflag)
 	      break;
 	    }
 	    /* fall through */
+	case vex_scalar_w_dq_mode:
 	case vex_vsib_d_w_dq_mode:
 	case vex_vsib_d_w_d_mode:
 	case vex_vsib_q_w_dq_mode:
 	case vex_vsib_q_w_d_mode:
 	case evex_x_gscat_mode:
-	case xmm_mdq_mode:
 	  shift = vex.w ? 3 : 2;
 	  break;
 	case x_mode:
@@ -15367,7 +15362,6 @@ OP_EX (int bytemode, int sizeflag)
       && bytemode != xmm_mw_mode
       && bytemode != xmm_md_mode
       && bytemode != xmm_mq_mode
-      && bytemode != xmm_mdq_mode
       && bytemode != xmmq_mode
       && bytemode != evex_half_bcst_xmmq_mode
       && bytemode != ymm_mode


