path: root/insns.dat
author     Anas Nashif <anas.nashif@intel.com>   2012-11-05 12:56:51 -0800
committer  Anas Nashif <anas.nashif@intel.com>   2012-11-05 12:56:51 -0800
commit     300d4816804c8ceb4a4601a49ec3ec479c1951b5 (patch)
tree       7164fe3883806d0d464103fd37dcebbdd2855a1e /insns.dat
Imported Upstream version 2.09.08 (upstream/2.09.08)
Diffstat (limited to 'insns.dat')
-rw-r--r--  insns.dat  3312
1 file changed, 3312 insertions, 0 deletions
diff --git a/insns.dat b/insns.dat
new file mode 100644
index 0000000..7574dc3
--- /dev/null
+++ b/insns.dat
@@ -0,0 +1,3312 @@
+;; --------------------------------------------------------------------------
+;;
+;; Copyright 1996-2010 The NASM Authors - All Rights Reserved
+;; See the file AUTHORS included with the NASM distribution for
+;; the specific copyright holders.
+;;
+;; Redistribution and use in source and binary forms, with or without
+;; modification, are permitted provided that the following
+;; conditions are met:
+;;
+;; * Redistributions of source code must retain the above copyright
+;; notice, this list of conditions and the following disclaimer.
+;; * Redistributions in binary form must reproduce the above
+;; copyright notice, this list of conditions and the following
+;; disclaimer in the documentation and/or other materials provided
+;; with the distribution.
+;;
+;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+;; CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+;; INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+;; MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+;; CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+;; SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+;; NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+;; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+;; HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+;; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+;; OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+;; EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+;;
+;; --------------------------------------------------------------------------
+
+;
+; insns.dat table of instructions for the Netwide Assembler
+;
+; Format of file: All four fields must be present on every functional
+; line. Hence `void' for no-operand instructions, and `\0' as the code
+; string for instructions such as EQU. If the last three fields are all
+; `ignore', no action is taken except to register the opcode as being
+; present.
+;
+; For a detailed description of the code string (third field), please
+; see the comment at the top of assemble.c. For a detailed description
+; of the flags (fourth field), please see insns.h.
+;
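+; As a concrete illustration of the four-field layout, the ADD form
+; that appears verbatim later in this table,
+;
+;     ADD         reg_al,imm      \1\x04\21       8086,SM
+;
+; breaks down as: mnemonic (ADD), operand template (reg_al,imm), code
+; string (\1\x04\21) and flag list (8086,SM).
+;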
+; Comments with a pound sign after the semicolon generate section
+; subheaders in the NASM documentation.
+;
+
+;# Special instructions...
+DB ignore ignore ignore
+DW ignore ignore ignore
+DD ignore ignore ignore
+DQ ignore ignore ignore
+DT ignore ignore ignore
+DO ignore ignore ignore
+DY ignore ignore ignore
+RESB imm \340 8086
+RESW ignore ignore ignore
+RESD ignore ignore ignore
+RESQ ignore ignore ignore
+REST ignore ignore ignore
+RESO ignore ignore ignore
+RESY ignore ignore ignore
+
+;# Conventional instructions
+AAA void \1\x37 8086,NOLONG
+AAD void \2\xD5\x0A 8086,NOLONG
+AAD imm \1\xD5\24 8086,SB,NOLONG
+AAM void \2\xD4\x0A 8086,NOLONG
+AAM imm \1\xD4\24 8086,SB,NOLONG
+AAS void \1\x3F 8086,NOLONG
+ADC mem,reg8 \1\x10\101 8086,SM
+ADC reg8,reg8 \1\x10\101 8086
+ADC mem,reg16 \320\1\x11\101 8086,SM
+ADC reg16,reg16 \320\1\x11\101 8086
+ADC mem,reg32 \321\1\x11\101 386,SM
+ADC reg32,reg32 \321\1\x11\101 386
+ADC mem,reg64 \324\1\x11\101 X64,SM
+ADC reg64,reg64 \324\1\x11\101 X64
+ADC reg8,mem \1\x12\110 8086,SM
+ADC reg8,reg8 \1\x12\110 8086
+ADC reg16,mem \320\1\x13\110 8086,SM
+ADC reg16,reg16 \320\1\x13\110 8086
+ADC reg32,mem \321\1\x13\110 386,SM
+ADC reg32,reg32 \321\1\x13\110 386
+ADC reg64,mem \324\1\x13\110 X64,SM
+ADC reg64,reg64 \324\1\x13\110 X64
+ADC rm16,imm8 \320\1\x83\202\275 8086
+ADC rm32,imm8 \321\1\x83\202\275 386
+ADC rm64,imm8 \324\1\x83\202\275 X64
+ADC reg_al,imm \1\x14\21 8086,SM
+ADC reg_ax,sbyte16 \320\1\x83\202\275 8086,SM
+ADC reg_ax,imm \320\1\x15\31 8086,SM
+ADC reg_eax,sbyte32 \321\1\x83\202\275 386,SM
+ADC reg_eax,imm \321\1\x15\41 386,SM
+ADC reg_rax,sbyte64 \324\1\x83\202\275 X64,SM
+ADC reg_rax,imm \324\1\x15\255 X64,SM
+ADC rm8,imm \1\x80\202\21 8086,SM
+ADC rm16,imm \320\145\x81\202\141 8086,SM
+ADC rm32,imm \321\155\x81\202\151 386,SM
+ADC rm64,imm \324\155\x81\202\251 X64,SM
+ADC mem,imm8 \1\x80\202\21 8086,SM
+ADC mem,imm16 \320\145\x81\202\141 8086,SM
+ADC mem,imm32 \321\155\x81\202\151 386,SM
+ADD mem,reg8 \1\x00\101 8086,SM
+ADD reg8,reg8 \1\x00\101 8086
+ADD mem,reg16 \320\1\x01\101 8086,SM
+ADD reg16,reg16 \320\1\x01\101 8086
+ADD mem,reg32 \321\1\x01\101 386,SM
+ADD reg32,reg32 \321\1\x01\101 386
+ADD mem,reg64 \324\1\x01\101 X64,SM
+ADD reg64,reg64 \324\1\x01\101 X64
+ADD reg8,mem \1\x02\110 8086,SM
+ADD reg8,reg8 \1\x02\110 8086
+ADD reg16,mem \320\1\x03\110 8086,SM
+ADD reg16,reg16 \320\1\x03\110 8086
+ADD reg32,mem \321\1\x03\110 386,SM
+ADD reg32,reg32 \321\1\x03\110 386
+ADD reg64,mem \324\1\x03\110 X64,SM
+ADD reg64,reg64 \324\1\x03\110 X64
+ADD rm16,imm8 \320\1\x83\200\275 8086
+ADD rm32,imm8 \321\1\x83\200\275 386
+ADD rm64,imm8 \324\1\x83\200\275 X64
+ADD reg_al,imm \1\x04\21 8086,SM
+ADD reg_ax,sbyte16 \320\1\x83\200\275 8086,SM
+ADD reg_ax,imm \320\1\x05\31 8086,SM
+ADD reg_eax,sbyte32 \321\1\x83\200\275 386,SM
+ADD reg_eax,imm \321\1\x05\41 386,SM
+ADD reg_rax,sbyte64 \324\1\x83\200\275 X64,SM
+ADD reg_rax,imm \324\1\x05\255 X64,SM
+ADD rm8,imm \1\x80\200\21 8086,SM
+ADD rm16,imm \320\145\x81\200\141 8086,SM
+ADD rm32,imm \321\155\x81\200\151 386,SM
+ADD rm64,imm \324\155\x81\200\251 X64,SM
+ADD mem,imm8 \1\x80\200\21 8086,SM
+ADD mem,imm16 \320\145\x81\200\141 8086,SM
+ADD mem,imm32 \321\155\x81\200\151 386,SM
+AND mem,reg8 \1\x20\101 8086,SM
+AND reg8,reg8 \1\x20\101 8086
+AND mem,reg16 \320\1\x21\101 8086,SM
+AND reg16,reg16 \320\1\x21\101 8086
+AND mem,reg32 \321\1\x21\101 386,SM
+AND reg32,reg32 \321\1\x21\101 386
+AND mem,reg64 \324\1\x21\101 X64,SM
+AND reg64,reg64 \324\1\x21\101 X64
+AND reg8,mem \1\x22\110 8086,SM
+AND reg8,reg8 \1\x22\110 8086
+AND reg16,mem \320\1\x23\110 8086,SM
+AND reg16,reg16 \320\1\x23\110 8086
+AND reg32,mem \321\1\x23\110 386,SM
+AND reg32,reg32 \321\1\x23\110 386
+AND reg64,mem \324\1\x23\110 X64,SM
+AND reg64,reg64 \324\1\x23\110 X64
+AND rm16,imm8 \320\1\x83\204\275 8086
+AND rm32,imm8 \321\1\x83\204\275 386
+AND rm64,imm8 \324\1\x83\204\275 X64
+AND reg_al,imm \1\x24\21 8086,SM
+AND reg_ax,sbyte16 \320\1\x83\204\275 8086,SM
+AND reg_ax,imm \320\1\x25\31 8086,SM
+AND reg_eax,sbyte32 \321\1\x83\204\275 386,SM
+AND reg_eax,imm \321\1\x25\41 386,SM
+AND reg_rax,sbyte64 \324\1\x83\204\275 X64,SM
+AND reg_rax,imm \324\1\x25\255 X64,SM
+AND rm8,imm \1\x80\204\21 8086,SM
+AND rm16,imm \320\145\x81\204\141 8086,SM
+AND rm32,imm \321\155\x81\204\151 386,SM
+AND rm64,imm \324\155\x81\204\251 X64,SM
+AND mem,imm8 \1\x80\204\21 8086,SM
+AND mem,imm16 \320\145\x81\204\141 8086,SM
+AND mem,imm32 \321\155\x81\204\151 386,SM
+ARPL mem,reg16 \1\x63\101 286,PROT,SM,NOLONG
+ARPL reg16,reg16 \1\x63\101 286,PROT,NOLONG
+BB0_RESET void \2\x0F\x3A PENT,CYRIX,ND
+BB1_RESET void \2\x0F\x3B PENT,CYRIX,ND
+BOUND reg16,mem \320\1\x62\110 186,NOLONG
+BOUND reg32,mem \321\1\x62\110 386,NOLONG
+BSF reg16,mem \320\2\x0F\xBC\110 386,SM
+BSF reg16,reg16 \320\2\x0F\xBC\110 386
+BSF reg32,mem \321\2\x0F\xBC\110 386,SM
+BSF reg32,reg32 \321\2\x0F\xBC\110 386
+BSF reg64,mem \324\2\x0F\xBC\110 X64,SM
+BSF reg64,reg64 \324\2\x0F\xBC\110 X64
+BSR reg16,mem \320\2\x0F\xBD\110 386,SM
+BSR reg16,reg16 \320\2\x0F\xBD\110 386
+BSR reg32,mem \321\2\x0F\xBD\110 386,SM
+BSR reg32,reg32 \321\2\x0F\xBD\110 386
+BSR reg64,mem \324\2\x0F\xBD\110 X64,SM
+BSR reg64,reg64 \324\2\x0F\xBD\110 X64
+BSWAP reg32 \321\1\x0F\10\xC8 486
+BSWAP reg64 \324\1\x0F\10\xC8 X64
+BT mem,reg16 \320\2\x0F\xA3\101 386,SM
+BT reg16,reg16 \320\2\x0F\xA3\101 386
+BT mem,reg32 \321\2\x0F\xA3\101 386,SM
+BT reg32,reg32 \321\2\x0F\xA3\101 386
+BT mem,reg64 \324\2\x0F\xA3\101 X64,SM
+BT reg64,reg64 \324\2\x0F\xA3\101 X64
+BT rm16,imm \320\2\x0F\xBA\204\25 386,SB
+BT rm32,imm \321\2\x0F\xBA\204\25 386,SB
+BT rm64,imm \324\2\x0F\xBA\204\25 X64,SB
+BTC mem,reg16 \320\2\x0F\xBB\101 386,SM
+BTC reg16,reg16 \320\2\x0F\xBB\101 386
+BTC mem,reg32 \321\2\x0F\xBB\101 386,SM
+BTC reg32,reg32 \321\2\x0F\xBB\101 386
+BTC mem,reg64 \324\2\x0F\xBB\101 X64,SM
+BTC reg64,reg64 \324\2\x0F\xBB\101 X64
+BTC rm16,imm \320\2\x0F\xBA\207\25 386,SB
+BTC rm32,imm \321\2\x0F\xBA\207\25 386,SB
+BTC rm64,imm \324\2\x0F\xBA\207\25 X64,SB
+BTR mem,reg16 \320\2\x0F\xB3\101 386,SM
+BTR reg16,reg16 \320\2\x0F\xB3\101 386
+BTR mem,reg32 \321\2\x0F\xB3\101 386,SM
+BTR reg32,reg32 \321\2\x0F\xB3\101 386
+BTR mem,reg64 \324\2\x0F\xB3\101 X64,SM
+BTR reg64,reg64 \324\2\x0F\xB3\101 X64
+BTR rm16,imm \320\2\x0F\xBA\206\25 386,SB
+BTR rm32,imm \321\2\x0F\xBA\206\25 386,SB
+BTR rm64,imm \324\2\x0F\xBA\206\25 X64,SB
+BTS mem,reg16 \320\2\x0F\xAB\101 386,SM
+BTS reg16,reg16 \320\2\x0F\xAB\101 386
+BTS mem,reg32 \321\2\x0F\xAB\101 386,SM
+BTS reg32,reg32 \321\2\x0F\xAB\101 386
+BTS mem,reg64 \324\2\x0F\xAB\101 X64,SM
+BTS reg64,reg64 \324\2\x0F\xAB\101 X64
+BTS rm16,imm \320\2\x0F\xBA\205\25 386,SB
+BTS rm32,imm \321\2\x0F\xBA\205\25 386,SB
+BTS rm64,imm \324\2\x0F\xBA\205\25 X64,SB
+CALL imm \322\1\xE8\64 8086
+CALL imm|near \322\1\xE8\64 8086
+CALL imm|far \322\1\x9A\34\74 8086,ND,NOLONG
+CALL imm16 \320\1\xE8\64 8086
+CALL imm16|near \320\1\xE8\64 8086
+CALL imm16|far \320\1\x9A\34\74 8086,ND,NOLONG
+CALL imm32 \321\1\xE8\64 386
+CALL imm32|near \321\1\xE8\64 386
+CALL imm32|far \321\1\x9A\34\74 386,ND,NOLONG
+CALL imm:imm \322\1\x9A\35\30 8086,NOLONG
+CALL imm16:imm \320\1\x9A\31\30 8086,NOLONG
+CALL imm:imm16 \320\1\x9A\31\30 8086,NOLONG
+CALL imm32:imm \321\1\x9A\41\30 386,NOLONG
+CALL imm:imm32 \321\1\x9A\41\30 386,NOLONG
+CALL mem|far \322\1\xFF\203 8086,NOLONG
+CALL mem|far \324\1\xFF\203 X64
+CALL mem16|far \320\1\xFF\203 8086
+CALL mem32|far \321\1\xFF\203 386
+CALL mem64|far \324\1\xFF\203 X64
+CALL mem|near \322\1\xFF\202 8086
+CALL mem16|near \320\1\xFF\202 8086
+CALL mem32|near \321\1\xFF\202 386,NOLONG
+CALL mem64|near \324\1\xFF\202 X64
+CALL reg16 \320\1\xFF\202 8086
+CALL reg32 \321\1\xFF\202 386,NOLONG
+CALL reg64 \323\1\xFF\202 X64
+CALL mem \322\1\xFF\202 8086
+CALL mem16 \320\1\xFF\202 8086
+CALL mem32 \321\1\xFF\202 386,NOLONG
+CALL mem64 \323\1\xFF\202 X64
+CBW void \320\1\x98 8086
+CDQ void \321\1\x99 386
+CDQE void \324\1\x98 X64
+CLC void \1\xF8 8086
+CLD void \1\xFC 8086
+CLGI void \3\x0F\x01\xDD X64,AMD
+CLI void \1\xFA 8086
+CLTS void \2\x0F\x06 286,PRIV
+CMC void \1\xF5 8086
+CMP mem,reg8 \1\x38\101 8086,SM
+CMP reg8,reg8 \1\x38\101 8086
+CMP mem,reg16 \320\1\x39\101 8086,SM
+CMP reg16,reg16 \320\1\x39\101 8086
+CMP mem,reg32 \321\1\x39\101 386,SM
+CMP reg32,reg32 \321\1\x39\101 386
+CMP mem,reg64 \324\1\x39\101 X64,SM
+CMP reg64,reg64 \324\1\x39\101 X64
+CMP reg8,mem \1\x3A\110 8086,SM
+CMP reg8,reg8 \1\x3A\110 8086
+CMP reg16,mem \320\1\x3B\110 8086,SM
+CMP reg16,reg16 \320\1\x3B\110 8086
+CMP reg32,mem \321\1\x3B\110 386,SM
+CMP reg32,reg32 \321\1\x3B\110 386
+CMP reg64,mem \324\1\x3B\110 X64,SM
+CMP reg64,reg64 \324\1\x3B\110 X64
+CMP rm16,imm8 \320\1\x83\207\275 8086
+CMP rm32,imm8 \321\1\x83\207\275 386
+CMP rm64,imm8 \324\1\x83\207\275 X64
+CMP reg_al,imm \1\x3C\21 8086,SM
+CMP reg_ax,sbyte16 \320\1\x83\207\275 8086,SM
+CMP reg_ax,imm \320\1\x3D\31 8086,SM
+CMP reg_eax,sbyte32 \321\1\x83\207\275 386,SM
+CMP reg_eax,imm \321\1\x3D\41 386,SM
+CMP reg_rax,sbyte64 \324\1\x83\207\275 X64,SM
+CMP reg_rax,imm \324\1\x3D\255 X64,SM
+CMP rm8,imm \1\x80\207\21 8086,SM
+CMP rm16,imm \320\145\x81\207\141 8086,SM
+CMP rm32,imm \321\155\x81\207\151 386,SM
+CMP rm64,imm \324\155\x81\207\251 X64,SM
+CMP mem,imm8 \1\x80\207\21 8086,SM
+CMP mem,imm16 \320\145\x81\207\141 8086,SM
+CMP mem,imm32 \321\155\x81\207\151 386,SM
+CMPSB void \335\1\xA6 8086
+CMPSD void \335\321\1\xA7 386
+CMPSQ void \335\324\1\xA7 X64
+CMPSW void \335\320\1\xA7 8086
+CMPXCHG mem,reg8 \2\x0F\xB0\101 PENT,SM
+CMPXCHG reg8,reg8 \2\x0F\xB0\101 PENT
+CMPXCHG mem,reg16 \320\2\x0F\xB1\101 PENT,SM
+CMPXCHG reg16,reg16 \320\2\x0F\xB1\101 PENT
+CMPXCHG mem,reg32 \321\2\x0F\xB1\101 PENT,SM
+CMPXCHG reg32,reg32 \321\2\x0F\xB1\101 PENT
+CMPXCHG mem,reg64 \324\2\x0F\xB1\101 X64,SM
+CMPXCHG reg64,reg64 \324\2\x0F\xB1\101 X64
+CMPXCHG486 mem,reg8 \2\x0F\xA6\101 486,SM,UNDOC,ND
+CMPXCHG486 reg8,reg8 \2\x0F\xA6\101 486,UNDOC,ND
+CMPXCHG486 mem,reg16 \320\2\x0F\xA7\101 486,SM,UNDOC,ND
+CMPXCHG486 reg16,reg16 \320\2\x0F\xA7\101 486,UNDOC,ND
+CMPXCHG486 mem,reg32 \321\2\x0F\xA7\101 486,SM,UNDOC,ND
+CMPXCHG486 reg32,reg32 \321\2\x0F\xA7\101 486,UNDOC,ND
+CMPXCHG8B mem \2\x0F\xC7\201 PENT
+CMPXCHG16B mem \324\2\x0F\xC7\201 X64
+CPUID void \2\x0F\xA2 PENT
+CPU_READ void \2\x0F\x3D PENT,CYRIX
+CPU_WRITE void \2\x0F\x3C PENT,CYRIX
+CQO void \324\1\x99 X64
+CWD void \320\1\x99 8086
+CWDE void \321\1\x98 386
+DAA void \1\x27 8086,NOLONG
+DAS void \1\x2F 8086,NOLONG
+DEC reg16 \320\10\x48 8086,NOLONG
+DEC reg32 \321\10\x48 386,NOLONG
+DEC rm8 \1\xFE\201 8086
+DEC rm16 \320\1\xFF\201 8086
+DEC rm32 \321\1\xFF\201 386
+DEC rm64 \324\1\xFF\201 X64
+DIV rm8 \1\xF6\206 8086
+DIV rm16 \320\1\xF7\206 8086
+DIV rm32 \321\1\xF7\206 386
+DIV rm64 \324\1\xF7\206 X64
+DMINT void \2\x0F\x39 P6,CYRIX
+EMMS void \2\x0F\x77 PENT,MMX
+ENTER imm,imm \1\xC8\30\25 186
+EQU imm \0 8086
+EQU imm:imm \0 8086
+F2XM1 void \2\xD9\xF0 8086,FPU
+FABS void \2\xD9\xE1 8086,FPU
+FADD mem32 \1\xD8\200 8086,FPU
+FADD mem64 \1\xDC\200 8086,FPU
+FADD fpureg|to \1\xDC\10\xC0 8086,FPU
+FADD fpureg \1\xD8\10\xC0 8086,FPU
+FADD fpureg,fpu0 \1\xDC\10\xC0 8086,FPU
+FADD fpu0,fpureg \1\xD8\11\xC0 8086,FPU
+FADD void \2\xDE\xC1 8086,FPU,ND
+FADDP fpureg \1\xDE\10\xC0 8086,FPU
+FADDP fpureg,fpu0 \1\xDE\10\xC0 8086,FPU
+FADDP void \2\xDE\xC1 8086,FPU,ND
+FBLD mem80 \1\xDF\204 8086,FPU
+FBLD mem \1\xDF\204 8086,FPU
+FBSTP mem80 \1\xDF\206 8086,FPU
+FBSTP mem \1\xDF\206 8086,FPU
+FCHS void \2\xD9\xE0 8086,FPU
+FCLEX void \341\2\xDB\xE2 8086,FPU
+FCMOVB fpureg \1\xDA\10\xC0 P6,FPU
+FCMOVB fpu0,fpureg \1\xDA\11\xC0 P6,FPU
+FCMOVB void \2\xDA\xC1 P6,FPU,ND
+FCMOVBE fpureg \1\xDA\10\xD0 P6,FPU
+FCMOVBE fpu0,fpureg \1\xDA\11\xD0 P6,FPU
+FCMOVBE void \2\xDA\xD1 P6,FPU,ND
+FCMOVE fpureg \1\xDA\10\xC8 P6,FPU
+FCMOVE fpu0,fpureg \1\xDA\11\xC8 P6,FPU
+FCMOVE void \2\xDA\xC9 P6,FPU,ND
+FCMOVNB fpureg \1\xDB\10\xC0 P6,FPU
+FCMOVNB fpu0,fpureg \1\xDB\11\xC0 P6,FPU
+FCMOVNB void \2\xDB\xC1 P6,FPU,ND
+FCMOVNBE fpureg \1\xDB\10\xD0 P6,FPU
+FCMOVNBE fpu0,fpureg \1\xDB\11\xD0 P6,FPU
+FCMOVNBE void \2\xDB\xD1 P6,FPU,ND
+FCMOVNE fpureg \1\xDB\10\xC8 P6,FPU
+FCMOVNE fpu0,fpureg \1\xDB\11\xC8 P6,FPU
+FCMOVNE void \2\xDB\xC9 P6,FPU,ND
+FCMOVNU fpureg \1\xDB\10\xD8 P6,FPU
+FCMOVNU fpu0,fpureg \1\xDB\11\xD8 P6,FPU
+FCMOVNU void \2\xDB\xD9 P6,FPU,ND
+FCMOVU fpureg \1\xDA\10\xD8 P6,FPU
+FCMOVU fpu0,fpureg \1\xDA\11\xD8 P6,FPU
+FCMOVU void \2\xDA\xD9 P6,FPU,ND
+FCOM mem32 \1\xD8\202 8086,FPU
+FCOM mem64 \1\xDC\202 8086,FPU
+FCOM fpureg \1\xD8\10\xD0 8086,FPU
+FCOM fpu0,fpureg \1\xD8\11\xD0 8086,FPU
+FCOM void \2\xD8\xD1 8086,FPU,ND
+FCOMI fpureg \1\xDB\10\xF0 P6,FPU
+FCOMI fpu0,fpureg \1\xDB\11\xF0 P6,FPU
+FCOMI void \2\xDB\xF1 P6,FPU,ND
+FCOMIP fpureg \1\xDF\10\xF0 P6,FPU
+FCOMIP fpu0,fpureg \1\xDF\11\xF0 P6,FPU
+FCOMIP void \2\xDF\xF1 P6,FPU,ND
+FCOMP mem32 \1\xD8\203 8086,FPU
+FCOMP mem64 \1\xDC\203 8086,FPU
+FCOMP fpureg \1\xD8\10\xD8 8086,FPU
+FCOMP fpu0,fpureg \1\xD8\11\xD8 8086,FPU
+FCOMP void \2\xD8\xD9 8086,FPU,ND
+FCOMPP void \2\xDE\xD9 8086,FPU
+FCOS void \2\xD9\xFF 386,FPU
+FDECSTP void \2\xD9\xF6 8086,FPU
+FDISI void \341\2\xDB\xE1 8086,FPU
+FDIV mem32 \1\xD8\206 8086,FPU
+FDIV mem64 \1\xDC\206 8086,FPU
+FDIV fpureg|to \1\xDC\10\xF8 8086,FPU
+FDIV fpureg \1\xD8\10\xF0 8086,FPU
+FDIV fpureg,fpu0 \1\xDC\10\xF8 8086,FPU
+FDIV fpu0,fpureg \1\xD8\11\xF0 8086,FPU
+FDIV void \2\xDE\xF9 8086,FPU,ND
+FDIVP fpureg \1\xDE\10\xF8 8086,FPU
+FDIVP fpureg,fpu0 \1\xDE\10\xF8 8086,FPU
+FDIVP void \2\xDE\xF9 8086,FPU,ND
+FDIVR mem32 \1\xD8\207 8086,FPU
+FDIVR mem64 \1\xDC\207 8086,FPU
+FDIVR fpureg|to \1\xDC\10\xF0 8086,FPU
+FDIVR fpureg,fpu0 \1\xDC\10\xF0 8086,FPU
+FDIVR fpureg \1\xD8\10\xF8 8086,FPU
+FDIVR fpu0,fpureg \1\xD8\11\xF8 8086,FPU
+FDIVR void \2\xDE\xF1 8086,FPU,ND
+FDIVRP fpureg \1\xDE\10\xF0 8086,FPU
+FDIVRP fpureg,fpu0 \1\xDE\10\xF0 8086,FPU
+FDIVRP void \2\xDE\xF1 8086,FPU,ND
+FEMMS void \2\x0F\x0E PENT,3DNOW
+FENI void \341\2\xDB\xE0 8086,FPU
+FFREE fpureg \1\xDD\10\xC0 8086,FPU
+FFREE void \2\xDD\xC1 8086,FPU
+FFREEP fpureg \1\xDF\10\xC0 286,FPU,UNDOC
+FFREEP void \2\xDF\xC1 286,FPU,UNDOC
+FIADD mem32 \1\xDA\200 8086,FPU
+FIADD mem16 \1\xDE\200 8086,FPU
+FICOM mem32 \1\xDA\202 8086,FPU
+FICOM mem16 \1\xDE\202 8086,FPU
+FICOMP mem32 \1\xDA\203 8086,FPU
+FICOMP mem16 \1\xDE\203 8086,FPU
+FIDIV mem32 \1\xDA\206 8086,FPU
+FIDIV mem16 \1\xDE\206 8086,FPU
+FIDIVR mem32 \1\xDA\207 8086,FPU
+FIDIVR mem16 \1\xDE\207 8086,FPU
+FILD mem32 \1\xDB\200 8086,FPU
+FILD mem16 \1\xDF\200 8086,FPU
+FILD mem64 \1\xDF\205 8086,FPU
+FIMUL mem32 \1\xDA\201 8086,FPU
+FIMUL mem16 \1\xDE\201 8086,FPU
+FINCSTP void \2\xD9\xF7 8086,FPU
+FINIT void \341\2\xDB\xE3 8086,FPU
+FIST mem32 \1\xDB\202 8086,FPU
+FIST mem16 \1\xDF\202 8086,FPU
+FISTP mem32 \1\xDB\203 8086,FPU
+FISTP mem16 \1\xDF\203 8086,FPU
+FISTP mem64 \1\xDF\207 8086,FPU
+FISTTP mem16 \1\xDF\201 PRESCOTT,FPU
+FISTTP mem32 \1\xDB\201 PRESCOTT,FPU
+FISTTP mem64 \1\xDD\201 PRESCOTT,FPU
+FISUB mem32 \1\xDA\204 8086,FPU
+FISUB mem16 \1\xDE\204 8086,FPU
+FISUBR mem32 \1\xDA\205 8086,FPU
+FISUBR mem16 \1\xDE\205 8086,FPU
+FLD mem32 \1\xD9\200 8086,FPU
+FLD mem64 \1\xDD\200 8086,FPU
+FLD mem80 \1\xDB\205 8086,FPU
+FLD fpureg \1\xD9\10\xC0 8086,FPU
+FLD void \2\xD9\xC1 8086,FPU,ND
+FLD1 void \2\xD9\xE8 8086,FPU
+FLDCW mem \1\xD9\205 8086,FPU,SW
+FLDENV mem \1\xD9\204 8086,FPU
+FLDL2E void \2\xD9\xEA 8086,FPU
+FLDL2T void \2\xD9\xE9 8086,FPU
+FLDLG2 void \2\xD9\xEC 8086,FPU
+FLDLN2 void \2\xD9\xED 8086,FPU
+FLDPI void \2\xD9\xEB 8086,FPU
+FLDZ void \2\xD9\xEE 8086,FPU
+FMUL mem32 \1\xD8\201 8086,FPU
+FMUL mem64 \1\xDC\201 8086,FPU
+FMUL fpureg|to \1\xDC\10\xC8 8086,FPU
+FMUL fpureg,fpu0 \1\xDC\10\xC8 8086,FPU
+FMUL fpureg \1\xD8\10\xC8 8086,FPU
+FMUL fpu0,fpureg \1\xD8\11\xC8 8086,FPU
+FMUL void \2\xDE\xC9 8086,FPU,ND
+FMULP fpureg \1\xDE\10\xC8 8086,FPU
+FMULP fpureg,fpu0 \1\xDE\10\xC8 8086,FPU
+FMULP void \2\xDE\xC9 8086,FPU,ND
+FNCLEX void \2\xDB\xE2 8086,FPU
+FNDISI void \2\xDB\xE1 8086,FPU
+FNENI void \2\xDB\xE0 8086,FPU
+FNINIT void \2\xDB\xE3 8086,FPU
+FNOP void \2\xD9\xD0 8086,FPU
+FNSAVE mem \1\xDD\206 8086,FPU
+FNSTCW mem \1\xD9\207 8086,FPU,SW
+FNSTENV mem \1\xD9\206 8086,FPU
+FNSTSW mem \1\xDD\207 8086,FPU,SW
+FNSTSW reg_ax \2\xDF\xE0 286,FPU
+FPATAN void \2\xD9\xF3 8086,FPU
+FPREM void \2\xD9\xF8 8086,FPU
+FPREM1 void \2\xD9\xF5 386,FPU
+FPTAN void \2\xD9\xF2 8086,FPU
+FRNDINT void \2\xD9\xFC 8086,FPU
+FRSTOR mem \1\xDD\204 8086,FPU
+FSAVE mem \341\1\xDD\206 8086,FPU
+FSCALE void \2\xD9\xFD 8086,FPU
+FSETPM void \2\xDB\xE4 286,FPU
+FSIN void \2\xD9\xFE 386,FPU
+FSINCOS void \2\xD9\xFB 386,FPU
+FSQRT void \2\xD9\xFA 8086,FPU
+FST mem32 \1\xD9\202 8086,FPU
+FST mem64 \1\xDD\202 8086,FPU
+FST fpureg \1\xDD\10\xD0 8086,FPU
+FST void \2\xDD\xD1 8086,FPU,ND
+FSTCW mem \341\1\xD9\207 8086,FPU,SW
+FSTENV mem \341\1\xD9\206 8086,FPU
+FSTP mem32 \1\xD9\203 8086,FPU
+FSTP mem64 \1\xDD\203 8086,FPU
+FSTP mem80 \1\xDB\207 8086,FPU
+FSTP fpureg \1\xDD\10\xD8 8086,FPU
+FSTP void \2\xDD\xD9 8086,FPU,ND
+FSTSW mem \341\1\xDD\207 8086,FPU,SW
+FSTSW reg_ax \341\2\xDF\xE0 286,FPU
+FSUB mem32 \1\xD8\204 8086,FPU
+FSUB mem64 \1\xDC\204 8086,FPU
+FSUB fpureg|to \1\xDC\10\xE8 8086,FPU
+FSUB fpureg,fpu0 \1\xDC\10\xE8 8086,FPU
+FSUB fpureg \1\xD8\10\xE0 8086,FPU
+FSUB fpu0,fpureg \1\xD8\11\xE0 8086,FPU
+FSUB void \2\xDE\xE9 8086,FPU,ND
+FSUBP fpureg \1\xDE\10\xE8 8086,FPU
+FSUBP fpureg,fpu0 \1\xDE\10\xE8 8086,FPU
+FSUBP void \2\xDE\xE9 8086,FPU,ND
+FSUBR mem32 \1\xD8\205 8086,FPU
+FSUBR mem64 \1\xDC\205 8086,FPU
+FSUBR fpureg|to \1\xDC\10\xE0 8086,FPU
+FSUBR fpureg,fpu0 \1\xDC\10\xE0 8086,FPU
+FSUBR fpureg \1\xD8\10\xE8 8086,FPU
+FSUBR fpu0,fpureg \1\xD8\11\xE8 8086,FPU
+FSUBR void \2\xDE\xE1 8086,FPU,ND
+FSUBRP fpureg \1\xDE\10\xE0 8086,FPU
+FSUBRP fpureg,fpu0 \1\xDE\10\xE0 8086,FPU
+FSUBRP void \2\xDE\xE1 8086,FPU,ND
+FTST void \2\xD9\xE4 8086,FPU
+FUCOM fpureg \1\xDD\10\xE0 386,FPU
+FUCOM fpu0,fpureg \1\xDD\11\xE0 386,FPU
+FUCOM void \2\xDD\xE1 386,FPU,ND
+FUCOMI fpureg \1\xDB\10\xE8 P6,FPU
+FUCOMI fpu0,fpureg \1\xDB\11\xE8 P6,FPU
+FUCOMI void \2\xDB\xE9 P6,FPU,ND
+FUCOMIP fpureg \1\xDF\10\xE8 P6,FPU
+FUCOMIP fpu0,fpureg \1\xDF\11\xE8 P6,FPU
+FUCOMIP void \2\xDF\xE9 P6,FPU,ND
+FUCOMP fpureg \1\xDD\10\xE8 386,FPU
+FUCOMP fpu0,fpureg \1\xDD\11\xE8 386,FPU
+FUCOMP void \2\xDD\xE9 386,FPU,ND
+FUCOMPP void \2\xDA\xE9 386,FPU
+FXAM void \2\xD9\xE5 8086,FPU
+FXCH fpureg \1\xD9\10\xC8 8086,FPU
+FXCH fpureg,fpu0 \1\xD9\10\xC8 8086,FPU
+FXCH fpu0,fpureg \1\xD9\11\xC8 8086,FPU
+FXCH void \2\xD9\xC9 8086,FPU,ND
+FXTRACT void \2\xD9\xF4 8086,FPU
+FYL2X void \2\xD9\xF1 8086,FPU
+FYL2XP1 void \2\xD9\xF9 8086,FPU
+HLT void \1\xF4 8086,PRIV
+IBTS mem,reg16 \320\2\x0F\xA7\101 386,SW,UNDOC,ND
+IBTS reg16,reg16 \320\2\x0F\xA7\101 386,UNDOC,ND
+IBTS mem,reg32 \321\2\x0F\xA7\101 386,SD,UNDOC,ND
+IBTS reg32,reg32 \321\2\x0F\xA7\101 386,UNDOC,ND
+ICEBP void \1\xF1 386,ND
+IDIV rm8 \1\xF6\207 8086
+IDIV rm16 \320\1\xF7\207 8086
+IDIV rm32 \321\1\xF7\207 386
+IDIV rm64 \324\1\xF7\207 X64
+IMUL rm8 \1\xF6\205 8086
+IMUL rm16 \320\1\xF7\205 8086
+IMUL rm32 \321\1\xF7\205 386
+IMUL rm64 \324\1\xF7\205 X64
+IMUL reg16,mem \320\2\x0F\xAF\110 386,SM
+IMUL reg16,reg16 \320\2\x0F\xAF\110 386
+IMUL reg32,mem \321\2\x0F\xAF\110 386,SM
+IMUL reg32,reg32 \321\2\x0F\xAF\110 386
+IMUL reg64,mem \324\2\x0F\xAF\110 X64,SM
+IMUL reg64,reg64 \324\2\x0F\xAF\110 X64
+IMUL reg16,mem,imm8 \320\1\x6B\110\16 186,SM
+IMUL reg16,mem,sbyte16 \320\1\x6B\110\16 186,SM,ND
+IMUL reg16,mem,imm16 \320\1\x69\110\32 186,SM
+IMUL reg16,mem,imm \320\146\x69\110\142 186,SM,ND
+IMUL reg16,reg16,imm8 \320\1\x6B\110\16 186
+IMUL reg16,reg16,sbyte16 \320\1\x6B\110\16 186,SM,ND
+IMUL reg16,reg16,imm16 \320\1\x69\110\32 186
+IMUL reg16,reg16,imm \320\146\x69\110\142 186,SM,ND
+IMUL reg32,mem,imm8 \321\1\x6B\110\16 386,SM
+IMUL reg32,mem,sbyte32 \321\1\x6B\110\16 386,SM,ND
+IMUL reg32,mem,imm32 \321\1\x69\110\42 386,SM
+IMUL reg32,mem,imm \321\156\x69\110\152 386,SM,ND
+IMUL reg32,reg32,imm8 \321\1\x6B\110\16 386
+IMUL reg32,reg32,sbyte32 \321\1\x6B\110\16 386,SM,ND
+IMUL reg32,reg32,imm32 \321\1\x69\110\42 386
+IMUL reg32,reg32,imm \321\156\x69\110\152 386,SM,ND
+IMUL reg64,mem,imm8 \324\1\x6B\110\16 X64,SM
+IMUL reg64,mem,sbyte64 \324\1\x6B\110\16 X64,SM,ND
+IMUL reg64,mem,imm32 \324\1\x69\110\42 X64,SM
+IMUL reg64,mem,imm \324\156\x69\110\252 X64,SM,ND
+IMUL reg64,reg64,imm8 \324\1\x6B\110\16 X64
+IMUL reg64,reg64,sbyte64 \324\1\x6B\110\16 X64,SM,ND
+IMUL reg64,reg64,imm32 \324\1\x69\110\42 X64
+IMUL reg64,reg64,imm \324\156\x69\110\252 X64,SM,ND
+IMUL reg16,imm8 \320\1\x6B\100\15 186
+IMUL reg16,sbyte16 \320\1\x6B\100\15 186,SM,ND
+IMUL reg16,imm16 \320\1\x69\100\31 186
+IMUL reg16,imm \320\145\x69\100\141 186,SM,ND
+IMUL reg32,imm8 \321\1\x6B\100\15 386
+IMUL reg32,sbyte32 \321\1\x6B\100\15 386,SM,ND
+IMUL reg32,imm32 \321\1\x69\100\41 386
+IMUL reg32,imm \321\155\x69\100\151 386,SM,ND
+IMUL reg64,imm8 \324\1\x6B\100\15 X64
+IMUL reg64,sbyte64 \324\1\x6B\100\15 X64,SM,ND
+IMUL reg64,imm32 \324\1\x69\100\255 X64
+IMUL reg64,imm \324\155\x69\100\251 X64,SM,ND
+IN reg_al,imm \1\xE4\25 8086,SB
+IN reg_ax,imm \320\1\xE5\25 8086,SB
+IN reg_eax,imm \321\1\xE5\25 386,SB
+IN reg_al,reg_dx \1\xEC 8086
+IN reg_ax,reg_dx \320\1\xED 8086
+IN reg_eax,reg_dx \321\1\xED 386
+INC reg16 \320\10\x40 8086,NOLONG
+INC reg32 \321\10\x40 386,NOLONG
+INC rm8 \1\xFE\200 8086
+INC rm16 \320\1\xFF\200 8086
+INC rm32 \321\1\xFF\200 386
+INC rm64 \324\1\xFF\200 X64
+INCBIN ignore ignore ignore
+INSB void \1\x6C 186
+INSD void \321\1\x6D 386
+INSW void \320\1\x6D 186
+INT imm \1\xCD\24 8086,SB
+INT01 void \1\xF1 386,ND
+INT1 void \1\xF1 386
+INT03 void \1\xCC 8086,ND
+INT3 void \1\xCC 8086
+INTO void \1\xCE 8086,NOLONG
+INVD void \2\x0F\x08 486,PRIV
+INVLPG mem \2\x0F\x01\207 486,PRIV
+INVLPGA reg_ax,reg_ecx \310\3\x0F\x01\xDF X86_64,AMD,NOLONG
+INVLPGA reg_eax,reg_ecx \311\3\x0F\x01\xDF X86_64,AMD
+INVLPGA reg_rax,reg_ecx \323\313\3\x0F\x01\xDF X64,AMD
+INVLPGA void \3\x0F\x01\xDF X86_64,AMD
+IRET void \322\1\xCF 8086
+IRETD void \321\1\xCF 386
+IRETQ void \324\1\xCF X64
+IRETW void \320\1\xCF 8086
+JCXZ imm \310\1\xE3\50 8086,NOLONG
+JECXZ imm \311\1\xE3\50 386
+JRCXZ imm \313\1\xE3\50 X64
+JMP imm|short \1\xEB\50 8086
+JMP imm \371\1\xEB\50 8086,ND
+JMP imm \322\1\xE9\64 8086
+JMP imm|near \322\1\xE9\64 8086,ND
+JMP imm|far \322\1\xEA\34\74 8086,ND,NOLONG
+JMP imm16 \320\1\xE9\64 8086
+JMP imm16|near \320\1\xE9\64 8086,ND
+JMP imm16|far \320\1\xEA\34\74 8086,ND,NOLONG
+JMP imm32 \321\1\xE9\64 386
+JMP imm32|near \321\1\xE9\64 386,ND
+JMP imm32|far \321\1\xEA\34\74 386,ND,NOLONG
+JMP imm:imm \322\1\xEA\35\30 8086,NOLONG
+JMP imm16:imm \320\1\xEA\31\30 8086,NOLONG
+JMP imm:imm16 \320\1\xEA\31\30 8086,NOLONG
+JMP imm32:imm \321\1\xEA\41\30 386,NOLONG
+JMP imm:imm32 \321\1\xEA\41\30 386,NOLONG
+JMP mem|far \322\1\xFF\205 8086,NOLONG
+JMP mem|far \324\1\xFF\205 X64
+JMP mem16|far \320\1\xFF\205 8086
+JMP mem32|far \321\1\xFF\205 386
+JMP mem64|far \324\1\xFF\205 X64
+JMP mem|near \322\1\xFF\204 8086
+JMP mem16|near \320\1\xFF\204 8086
+JMP mem32|near \321\1\xFF\204 386,NOLONG
+JMP mem64|near \323\1\xFF\204 X64
+JMP reg16 \320\1\xFF\204 8086
+JMP reg32 \321\1\xFF\204 386,NOLONG
+JMP reg64 \323\1\xFF\204 X64
+JMP mem \322\1\xFF\204 8086
+JMP mem16 \320\1\xFF\204 8086
+JMP mem32 \321\1\xFF\204 386,NOLONG
+JMP mem64 \323\1\xFF\204 X64
+JMPE imm \322\2\x0F\xB8\64 IA64
+JMPE imm16 \320\2\x0F\xB8\64 IA64
+JMPE imm32 \321\2\x0F\xB8\64 IA64
+JMPE rm16 \320\2\x0F\x00\206 IA64
+JMPE rm32 \321\2\x0F\x00\206 IA64
+LAHF void \1\x9F 8086
+LAR reg16,mem \320\2\x0F\x02\110 286,PROT,SW
+LAR reg16,reg16 \320\2\x0F\x02\110 286,PROT
+LAR reg16,reg32 \320\2\x0F\x02\110 386,PROT
+LAR reg16,reg64 \320\323\2\x0F\x02\110 X64,PROT,ND
+LAR reg32,mem \321\2\x0F\x02\110 386,PROT,SW
+LAR reg32,reg16 \321\2\x0F\x02\110 386,PROT
+LAR reg32,reg32 \321\2\x0F\x02\110 386,PROT
+LAR reg32,reg64 \321\323\2\x0F\x02\110 X64,PROT,ND
+LAR reg64,mem \324\2\x0F\x02\110 X64,PROT,SW
+LAR reg64,reg16 \324\2\x0F\x02\110 X64,PROT
+LAR reg64,reg32 \324\2\x0F\x02\110 X64,PROT
+LAR reg64,reg64 \324\2\x0F\x02\110 X64,PROT
+LDS reg16,mem \320\1\xC5\110 8086,NOLONG
+LDS reg32,mem \321\1\xC5\110 386,NOLONG
+LEA reg16,mem \320\1\x8D\110 8086
+LEA reg32,mem \321\1\x8D\110 386
+LEA reg64,mem \324\1\x8D\110 X64
+LEAVE void \1\xC9 186
+LES reg16,mem \320\1\xC4\110 8086,NOLONG
+LES reg32,mem \321\1\xC4\110 386,NOLONG
+LFENCE void \3\x0F\xAE\xE8 X64,AMD
+LFS reg16,mem \320\2\x0F\xB4\110 386
+LFS reg32,mem \321\2\x0F\xB4\110 386
+LFS reg64,mem \324\2\x0F\xB4\110 X64
+LGDT mem \2\x0F\x01\202 286,PRIV
+LGS reg16,mem \320\2\x0F\xB5\110 386
+LGS reg32,mem \321\2\x0F\xB5\110 386
+LGS reg64,mem \324\2\x0F\xB5\110 X64
+LIDT mem \2\x0F\x01\203 286,PRIV
+LLDT mem \2\x0F\x00\202 286,PROT,PRIV
+LLDT mem16 \2\x0F\x00\202 286,PROT,PRIV
+LLDT reg16 \2\x0F\x00\202 286,PROT,PRIV
+LMSW mem \2\x0F\x01\206 286,PRIV
+LMSW mem16 \2\x0F\x01\206 286,PRIV
+LMSW reg16 \2\x0F\x01\206 286,PRIV
+LOADALL void \2\x0F\x07 386,UNDOC
+LOADALL286 void \2\x0F\x05 286,UNDOC
+LODSB void \1\xAC 8086
+LODSD void \321\1\xAD 386
+LODSQ void \324\1\xAD X64
+LODSW void \320\1\xAD 8086
+LOOP imm \312\1\xE2\50 8086
+LOOP imm,reg_cx \310\1\xE2\50 8086,NOLONG
+LOOP imm,reg_ecx \311\1\xE2\50 386
+LOOP imm,reg_rcx \313\1\xE2\50 X64
+LOOPE imm \312\1\xE1\50 8086
+LOOPE imm,reg_cx \310\1\xE1\50 8086,NOLONG
+LOOPE imm,reg_ecx \311\1\xE1\50 386
+LOOPE imm,reg_rcx \313\1\xE1\50 X64
+LOOPNE imm \312\1\xE0\50 8086
+LOOPNE imm,reg_cx \310\1\xE0\50 8086,NOLONG
+LOOPNE imm,reg_ecx \311\1\xE0\50 386
+LOOPNE imm,reg_rcx \313\1\xE0\50 X64
+LOOPNZ imm \312\1\xE0\50 8086
+LOOPNZ imm,reg_cx \310\1\xE0\50 8086,NOLONG
+LOOPNZ imm,reg_ecx \311\1\xE0\50 386
+LOOPNZ imm,reg_rcx \313\1\xE0\50 X64
+LOOPZ imm \312\1\xE1\50 8086
+LOOPZ imm,reg_cx \310\1\xE1\50 8086,NOLONG
+LOOPZ imm,reg_ecx \311\1\xE1\50 386
+LOOPZ imm,reg_rcx \313\1\xE1\50 X64
+LSL reg16,mem \320\2\x0F\x03\110 286,PROT,SW
+LSL reg16,reg16 \320\2\x0F\x03\110 286,PROT
+LSL reg16,reg32 \320\2\x0F\x03\110 386,PROT
+LSL reg16,reg64 \320\323\2\x0F\x03\110 X64,PROT,ND
+LSL reg32,mem \321\2\x0F\x03\110 386,PROT,SW
+LSL reg32,reg16 \321\2\x0F\x03\110 386,PROT
+LSL reg32,reg32 \321\2\x0F\x03\110 386,PROT
+LSL reg32,reg64 \321\323\2\x0F\x03\110 X64,PROT,ND
+LSL reg64,mem \324\2\x0F\x03\110 X64,PROT,SW
+LSL reg64,reg16 \324\2\x0F\x03\110 X64,PROT
+LSL reg64,reg32 \324\2\x0F\x03\110 X64,PROT
+LSL reg64,reg64 \324\2\x0F\x03\110 X64,PROT
+LSS reg16,mem \320\2\x0F\xB2\110 386
+LSS reg32,mem \321\2\x0F\xB2\110 386
+LSS reg64,mem \324\2\x0F\xB2\110 X64
+LTR mem \2\x0F\x00\203 286,PROT,PRIV
+LTR mem16 \2\x0F\x00\203 286,PROT,PRIV
+LTR reg16 \2\x0F\x00\203 286,PROT,PRIV
+MFENCE void \3\x0F\xAE\xF0 X64,AMD
+MONITOR void \3\x0F\x01\xC8 PRESCOTT
+MONITOR reg_eax,reg_ecx,reg_edx \3\x0F\x01\xC8 PRESCOTT,ND
+MONITOR reg_rax,reg_ecx,reg_edx \3\x0F\x01\xC8 X64,ND
+MOV mem,reg_sreg \1\x8C\101 8086,SM
+MOV reg16,reg_sreg \320\1\x8C\101 8086
+MOV reg32,reg_sreg \321\1\x8C\101 386
+MOV reg_sreg,mem \1\x8E\110 8086,SM
+MOV reg_sreg,reg16 \1\x8E\110 8086
+MOV reg_sreg,reg32 \1\x8E\110 386
+MOV reg_al,mem_offs \1\xA0\45 8086,SM
+MOV reg_ax,mem_offs \320\1\xA1\45 8086,SM
+MOV reg_eax,mem_offs \321\1\xA1\45 386,SM
+MOV reg_rax,mem_offs \324\1\xA1\45 X64,SM
+MOV mem_offs,reg_al \1\xA2\44 8086,SM
+MOV mem_offs,reg_ax \320\1\xA3\44 8086,SM
+MOV mem_offs,reg_eax \321\1\xA3\44 386,SM
+MOV mem_offs,reg_rax \324\1\xA3\44 X64,SM
+MOV reg32,reg_creg \334\2\x0F\x20\101 386,PRIV,NOLONG
+MOV reg64,reg_creg \323\2\x0F\x20\101 X64,PRIV
+MOV reg_creg,reg32 \334\2\x0F\x22\110 386,PRIV,NOLONG
+MOV reg_creg,reg64 \323\2\x0F\x22\110 X64,PRIV
+MOV reg32,reg_dreg \2\x0F\x21\101 386,PRIV,NOLONG
+MOV reg64,reg_dreg \323\2\x0F\x21\101 X64,PRIV
+MOV reg_dreg,reg32 \2\x0F\x23\110 386,PRIV,NOLONG
+MOV reg_dreg,reg64 \323\2\x0F\x23\110 X64,PRIV
+MOV reg32,reg_treg \2\x0F\x24\101 386,NOLONG,ND
+MOV reg_treg,reg32 \2\x0F\x26\110 386,NOLONG,ND
+MOV mem,reg8 \1\x88\101 8086,SM
+MOV reg8,reg8 \1\x88\101 8086
+MOV mem,reg16 \320\1\x89\101 8086,SM
+MOV reg16,reg16 \320\1\x89\101 8086
+MOV mem,reg32 \321\1\x89\101 386,SM
+MOV reg32,reg32 \321\1\x89\101 386
+MOV mem,reg64 \324\1\x89\101 X64,SM
+MOV reg64,reg64 \324\1\x89\101 X64
+MOV reg8,mem \1\x8A\110 8086,SM
+MOV reg8,reg8 \1\x8A\110 8086
+MOV reg16,mem \320\1\x8B\110 8086,SM
+MOV reg16,reg16 \320\1\x8B\110 8086
+MOV reg32,mem \321\1\x8B\110 386,SM
+MOV reg32,reg32 \321\1\x8B\110 386
+MOV reg64,mem \324\1\x8B\110 X64,SM
+MOV reg64,reg64 \324\1\x8B\110 X64
+MOV reg8,imm \10\xB0\21 8086,SM
+MOV reg16,imm \320\10\xB8\31 8086,SM
+MOV reg32,imm \321\10\xB8\41 386,SM
+MOV reg64,imm \324\10\xB8\55 X64,SM
+MOV rm8,imm \1\xC6\200\21 8086,SM
+MOV rm16,imm \320\1\xC7\200\31 8086,SM
+MOV rm32,imm \321\1\xC7\200\41 386,SM
+MOV rm64,imm \324\1\xC7\200\255 X64,SM
+MOV rm64,imm32 \324\1\xC7\200\255 X64
+MOV mem,imm8 \1\xC6\200\21 8086,SM
+MOV mem,imm16 \320\1\xC7\200\31 8086,SM
+MOV mem,imm32 \321\1\xC7\200\41 386,SM
+MOVD mmxreg,mem \360\2\x0F\x6E\110 PENT,MMX,SD
+MOVD mmxreg,reg32 \360\2\x0F\x6E\110 PENT,MMX
+MOVD mem,mmxreg \360\2\x0F\x7E\101 PENT,MMX,SD
+MOVD reg32,mmxreg \360\2\x0F\x7E\101 PENT,MMX
+MOVD xmmreg,mem \360\320\2\x0F\x6E\110 X64,SD
+MOVD xmmreg,reg32 \360\320\2\x0F\x6E\110 X64
+MOVD mem,xmmreg \360\320\2\x0F\x7E\101 X64,SD
+MOVD reg32,xmmreg \360\320\2\x0F\x7E\101 X64,SSE
+MOVQ mmxreg,mmxrm \360\323\2\x0F\x6F\110 PENT,MMX,SQ
+MOVQ mmxrm,mmxreg \360\323\2\x0F\x7F\101 PENT,MMX,SQ
+MOVQ mmxreg,rm64 \360\2\x0F\x6E\110 X64,MMX
+MOVQ rm64,mmxreg \360\2\x0F\x7E\101 X64,MMX
+MOVSB void \1\xA4 8086
+MOVSD void \321\1\xA5 386
+MOVSQ void \324\1\xA5 X64
+MOVSW void \320\1\xA5 8086
+MOVSX reg16,mem \320\2\x0F\xBE\110 386,SB
+MOVSX reg16,reg8 \320\2\x0F\xBE\110 386
+MOVSX reg32,rm8 \321\2\x0F\xBE\110 386
+MOVSX reg32,rm16 \321\2\x0F\xBF\110 386
+MOVSX reg64,rm8 \324\2\x0F\xBE\110 X64
+MOVSX reg64,rm16 \324\2\x0F\xBF\110 X64
+MOVSXD reg64,rm32 \324\1\x63\110 X64
+MOVSX reg64,rm32 \324\1\x63\110 X64,ND
+MOVZX reg16,mem \320\2\x0F\xB6\110 386,SB
+MOVZX reg16,reg8 \320\2\x0F\xB6\110 386
+MOVZX reg32,rm8 \321\2\x0F\xB6\110 386
+MOVZX reg32,rm16 \321\2\x0F\xB7\110 386
+MOVZX reg64,rm8 \324\2\x0F\xB6\110 X64
+MOVZX reg64,rm16 \324\2\x0F\xB7\110 X64
+MUL rm8 \1\xF6\204 8086
+MUL rm16 \320\1\xF7\204 8086
+MUL rm32 \321\1\xF7\204 386
+MUL rm64 \324\1\xF7\204 X64
+MWAIT void \3\x0F\x01\xC9 PRESCOTT
+MWAIT reg_eax,reg_ecx \3\x0F\x01\xC9 PRESCOTT,ND
+NEG rm8 \1\xF6\203 8086
+NEG rm16 \320\1\xF7\203 8086
+NEG rm32 \321\1\xF7\203 386
+NEG rm64 \324\1\xF7\203 X64
+NOP void \314\1\x90 8086
+NOP rm16 \320\2\x0F\x1F\200 P6
+NOP rm32 \321\2\x0F\x1F\200 P6
+NOP rm64 \324\2\x0F\x1F\200 X64
+NOT rm8 \1\xF6\202 8086
+NOT rm16 \320\1\xF7\202 8086
+NOT rm32 \321\1\xF7\202 386
+NOT rm64 \324\1\xF7\202 X64
+OR mem,reg8 \1\x08\101 8086,SM
+OR reg8,reg8 \1\x08\101 8086
+OR mem,reg16 \320\1\x09\101 8086,SM
+OR reg16,reg16 \320\1\x09\101 8086
+OR mem,reg32 \321\1\x09\101 386,SM
+OR reg32,reg32 \321\1\x09\101 386
+OR mem,reg64 \324\1\x09\101 X64,SM
+OR reg64,reg64 \324\1\x09\101 X64
+OR reg8,mem \1\x0A\110 8086,SM
+OR reg8,reg8 \1\x0A\110 8086
+OR reg16,mem \320\1\x0B\110 8086,SM
+OR reg16,reg16 \320\1\x0B\110 8086
+OR reg32,mem \321\1\x0B\110 386,SM
+OR reg32,reg32 \321\1\x0B\110 386
+OR reg64,mem \324\1\x0B\110 X64,SM
+OR reg64,reg64 \324\1\x0B\110 X64
+OR rm16,imm8 \320\1\x83\201\275 8086
+OR rm32,imm8 \321\1\x83\201\275 386
+OR rm64,imm8 \324\1\x83\201\275 X64
+OR reg_al,imm \1\x0C\21 8086,SM
+OR reg_ax,sbyte16 \320\1\x83\201\275 8086,SM
+OR reg_ax,imm \320\1\x0D\31 8086,SM
+OR reg_eax,sbyte32 \321\1\x83\201\275 386,SM
+OR reg_eax,imm \321\1\x0D\41 386,SM
+OR reg_rax,sbyte64 \324\1\x83\201\275 X64,SM
+OR reg_rax,imm \324\1\x0D\255 X64,SM
+OR rm8,imm \1\x80\201\21 8086,SM
+OR rm16,imm \320\145\x81\201\141 8086,SM
+OR rm32,imm \321\155\x81\201\151 386,SM
+OR rm64,imm \324\155\x81\201\251 X64,SM
+OR mem,imm8 \1\x80\201\21 8086,SM
+OR mem,imm16 \320\145\x81\201\141 8086,SM
+OR mem,imm32 \321\155\x81\201\151 386,SM
+OUT imm,reg_al \1\xE6\24 8086,SB
+OUT imm,reg_ax \320\1\xE7\24 8086,SB
+OUT imm,reg_eax \321\1\xE7\24 386,SB
+OUT reg_dx,reg_al \1\xEE 8086
+OUT reg_dx,reg_ax \320\1\xEF 8086
+OUT reg_dx,reg_eax \321\1\xEF 386
+OUTSB void \1\x6E 186
+OUTSD void \321\1\x6F 386
+OUTSW void \320\1\x6F 186
+PACKSSDW mmxreg,mmxrm \360\323\2\x0F\x6B\110 PENT,MMX,SQ
+PACKSSWB mmxreg,mmxrm \360\323\2\x0F\x63\110 PENT,MMX,SQ
+PACKUSWB mmxreg,mmxrm \360\323\2\x0F\x67\110 PENT,MMX,SQ
+PADDB mmxreg,mmxrm \360\323\2\x0F\xFC\110 PENT,MMX,SQ
+PADDD mmxreg,mmxrm \360\323\2\x0F\xFE\110 PENT,MMX,SQ
+PADDSB mmxreg,mmxrm \360\323\2\x0F\xEC\110 PENT,MMX,SQ
+PADDSIW mmxreg,mmxrm \323\2\x0F\x51\110 PENT,MMX,SQ,CYRIX
+PADDSW mmxreg,mmxrm \360\323\2\x0F\xED\110 PENT,MMX,SQ
+PADDUSB mmxreg,mmxrm \360\323\2\x0F\xDC\110 PENT,MMX,SQ
+PADDUSW mmxreg,mmxrm \360\323\2\x0F\xDD\110 PENT,MMX,SQ
+PADDW mmxreg,mmxrm \360\323\2\x0F\xFD\110 PENT,MMX,SQ
+PAND mmxreg,mmxrm \360\323\2\x0F\xDB\110 PENT,MMX,SQ
+PANDN mmxreg,mmxrm \360\323\2\x0F\xDF\110 PENT,MMX,SQ
+PAUSE void \314\333\1\x90 8086
+PAVEB mmxreg,mmxrm \323\2\x0F\x50\110 PENT,MMX,SQ,CYRIX
+PAVGUSB mmxreg,mmxrm \323\2\x0F\x0F\110\01\xBF PENT,3DNOW,SQ
+PCMPEQB mmxreg,mmxrm \360\323\2\x0F\x74\110 PENT,MMX,SQ
+PCMPEQD mmxreg,mmxrm \360\323\2\x0F\x76\110 PENT,MMX,SQ
+PCMPEQW mmxreg,mmxrm \360\323\2\x0F\x75\110 PENT,MMX,SQ
+PCMPGTB mmxreg,mmxrm \360\323\2\x0F\x64\110 PENT,MMX,SQ
+PCMPGTD mmxreg,mmxrm \360\323\2\x0F\x66\110 PENT,MMX,SQ
+PCMPGTW mmxreg,mmxrm \360\323\2\x0F\x65\110 PENT,MMX,SQ
+PDISTIB mmxreg,mem \2\x0F\x54\110 PENT,MMX,SM,CYRIX
+PF2ID mmxreg,mmxrm \323\2\x0F\x0F\110\01\x1D PENT,3DNOW,SQ
+PFACC mmxreg,mmxrm \323\2\x0F\x0F\110\01\xAE PENT,3DNOW,SQ
+PFADD mmxreg,mmxrm \323\2\x0F\x0F\110\01\x9E PENT,3DNOW,SQ
+PFCMPEQ mmxreg,mmxrm \323\2\x0F\x0F\110\01\xB0 PENT,3DNOW,SQ
+PFCMPGE mmxreg,mmxrm \323\2\x0F\x0F\110\01\x90 PENT,3DNOW,SQ
+PFCMPGT mmxreg,mmxrm \323\2\x0F\x0F\110\01\xA0 PENT,3DNOW,SQ
+PFMAX mmxreg,mmxrm \323\2\x0F\x0F\110\01\xA4 PENT,3DNOW,SQ
+PFMIN mmxreg,mmxrm \323\2\x0F\x0F\110\01\x94 PENT,3DNOW,SQ
+PFMUL mmxreg,mmxrm \323\2\x0F\x0F\110\01\xB4 PENT,3DNOW,SQ
+PFRCP mmxreg,mmxrm \323\2\x0F\x0F\110\01\x96 PENT,3DNOW,SQ
+PFRCPIT1 mmxreg,mmxrm \323\2\x0F\x0F\110\01\xA6 PENT,3DNOW,SQ
+PFRCPIT2 mmxreg,mmxrm \323\2\x0F\x0F\110\01\xB6 PENT,3DNOW,SQ
+PFRSQIT1 mmxreg,mmxrm \323\2\x0F\x0F\110\01\xA7 PENT,3DNOW,SQ
+PFRSQRT mmxreg,mmxrm \323\2\x0F\x0F\110\01\x97 PENT,3DNOW,SQ
+PFSUB mmxreg,mmxrm \323\2\x0F\x0F\110\01\x9A PENT,3DNOW,SQ
+PFSUBR mmxreg,mmxrm \323\2\x0F\x0F\110\01\xAA PENT,3DNOW,SQ
+PI2FD mmxreg,mmxrm \323\2\x0F\x0F\110\01\x0D PENT,3DNOW,SQ
+PMACHRIW mmxreg,mem \2\x0F\x5E\110 PENT,MMX,SM,CYRIX
+PMADDWD mmxreg,mmxrm \360\323\2\x0F\xF5\110 PENT,MMX,SQ
+PMAGW mmxreg,mmxrm \323\2\x0F\x52\110 PENT,MMX,SQ,CYRIX
+PMULHRIW mmxreg,mmxrm \323\2\x0F\x5D\110 PENT,MMX,SQ,CYRIX
+PMULHRWA mmxreg,mmxrm \323\2\x0F\x0F\110\1\xB7 PENT,3DNOW,SQ
+PMULHRWC mmxreg,mmxrm \323\2\x0F\x59\110 PENT,MMX,SQ,CYRIX
+PMULHW mmxreg,mmxrm \360\323\2\x0F\xE5\110 PENT,MMX,SQ
+PMULLW mmxreg,mmxrm \360\323\2\x0F\xD5\110 PENT,MMX,SQ
+PMVGEZB mmxreg,mem \2\x0F\x5C\110 PENT,MMX,SQ,CYRIX
+PMVLZB mmxreg,mem \2\x0F\x5B\110 PENT,MMX,SQ,CYRIX
+PMVNZB mmxreg,mem \2\x0F\x5A\110 PENT,MMX,SQ,CYRIX
+PMVZB mmxreg,mem \2\x0F\x58\110 PENT,MMX,SQ,CYRIX
+POP reg16 \320\10\x58 8086
+POP reg32 \321\10\x58 386,NOLONG
+POP reg64 \323\10\x58 X64
+POP rm16 \320\1\x8F\200 8086
+POP rm32 \321\1\x8F\200 386,NOLONG
+POP rm64 \323\1\x8F\200 X64
+POP reg_cs \1\x0F 8086,UNDOC,ND
+POP reg_dess \345 8086,NOLONG
+POP reg_fsgs \1\x0F\347 386
+POPA void \322\1\x61 186,NOLONG
+POPAD void \321\1\x61 386,NOLONG
+POPAW void \320\1\x61 186,NOLONG
+POPF void \322\1\x9D 8086
+POPFD void \321\1\x9D 386,NOLONG
+POPFQ void \321\1\x9D X64
+POPFW void \320\1\x9D 8086
+POR mmxreg,mmxrm \360\323\2\x0F\xEB\110 PENT,MMX,SQ
+PREFETCH mem \2\x0F\x0D\200 PENT,3DNOW,SQ
+PREFETCHW mem \2\x0F\x0D\201 PENT,3DNOW,SQ
+PSLLD mmxreg,mmxrm \360\323\2\x0F\xF2\110 PENT,MMX,SQ
+PSLLD mmxreg,imm \360\2\x0F\x72\206\25 PENT,MMX
+PSLLQ mmxreg,mmxrm \360\323\2\x0F\xF3\110 PENT,MMX,SQ
+PSLLQ mmxreg,imm \360\2\x0F\x73\206\25 PENT,MMX
+PSLLW mmxreg,mmxrm \360\323\2\x0F\xF1\110 PENT,MMX,SQ
+PSLLW mmxreg,imm \360\2\x0F\x71\206\25 PENT,MMX
+PSRAD mmxreg,mmxrm \360\323\2\x0F\xE2\110 PENT,MMX,SQ
+PSRAD mmxreg,imm \360\2\x0F\x72\204\25 PENT,MMX
+PSRAW mmxreg,mmxrm \360\323\2\x0F\xE1\110 PENT,MMX,SQ
+PSRAW mmxreg,imm \360\2\x0F\x71\204\25 PENT,MMX
+PSRLD mmxreg,mmxrm \360\323\2\x0F\xD2\110 PENT,MMX,SQ
+PSRLD mmxreg,imm \360\2\x0F\x72\202\25 PENT,MMX
+PSRLQ mmxreg,mmxrm \360\323\2\x0F\xD3\110 PENT,MMX,SQ
+PSRLQ mmxreg,imm \360\2\x0F\x73\202\25 PENT,MMX
+PSRLW mmxreg,mmxrm \360\323\2\x0F\xD1\110 PENT,MMX,SQ
+PSRLW mmxreg,imm \360\2\x0F\x71\202\25 PENT,MMX
+PSUBB mmxreg,mmxrm \360\323\2\x0F\xF8\110 PENT,MMX,SQ
+PSUBD mmxreg,mmxrm \360\323\2\x0F\xFA\110 PENT,MMX,SQ
+PSUBSB mmxreg,mmxrm \360\323\2\x0F\xE8\110 PENT,MMX,SQ
+PSUBSIW mmxreg,mmxrm \323\2\x0F\x55\110 PENT,MMX,SQ,CYRIX
+PSUBSW mmxreg,mmxrm \360\323\2\x0F\xE9\110 PENT,MMX,SQ
+PSUBUSB mmxreg,mmxrm \360\323\2\x0F\xD8\110 PENT,MMX,SQ
+PSUBUSW mmxreg,mmxrm \360\323\2\x0F\xD9\110 PENT,MMX,SQ
+PSUBW mmxreg,mmxrm \360\323\2\x0F\xF9\110 PENT,MMX,SQ
+PUNPCKHBW mmxreg,mmxrm \360\323\2\x0F\x68\110 PENT,MMX,SQ
+PUNPCKHDQ mmxreg,mmxrm \360\323\2\x0F\x6A\110 PENT,MMX,SQ
+PUNPCKHWD mmxreg,mmxrm \360\323\2\x0F\x69\110 PENT,MMX,SQ
+PUNPCKLBW mmxreg,mmxrm \360\323\2\x0F\x60\110 PENT,MMX,SQ
+PUNPCKLDQ mmxreg,mmxrm \360\323\2\x0F\x62\110 PENT,MMX,SQ
+PUNPCKLWD mmxreg,mmxrm \360\323\2\x0F\x61\110 PENT,MMX,SQ
+PUSH reg16 \320\10\x50 8086
+PUSH reg32 \321\10\x50 386,NOLONG
+PUSH reg64 \323\10\x50 X64
+PUSH rm16 \320\1\xFF\206 8086
+PUSH rm32 \321\1\xFF\206 386,NOLONG
+PUSH rm64 \323\1\xFF\206 X64
+PUSH reg_cs \344 8086,NOLONG
+PUSH reg_dess \344 8086,NOLONG
+PUSH reg_fsgs \1\x0F\346 386
+PUSH imm8 \1\x6A\274 186
+PUSH imm16 \320\144\x68\140 186,AR0,SZ
+PUSH imm32 \321\154\x68\150 386,NOLONG,AR0,SZ
+PUSH imm32 \321\154\x68\150 386,NOLONG,SD
+PUSH imm32 \323\154\x68\250 X64,AR0,SZ
+PUSH imm64 \323\154\x68\250 X64,AR0,SZ
+PUSHA void \322\1\x60 186,NOLONG
+PUSHAD void \321\1\x60 386,NOLONG
+PUSHAW void \320\1\x60 186,NOLONG
+PUSHF void \322\1\x9C 8086
+PUSHFD void \321\1\x9C 386,NOLONG
+PUSHFQ void \321\1\x9C X64
+PUSHFW void \320\1\x9C 8086
+PXOR mmxreg,mmxrm \360\323\2\x0F\xEF\110 PENT,MMX,SQ
+RCL rm8,unity \1\xD0\202 8086
+RCL rm8,reg_cl \1\xD2\202 8086
+RCL rm8,imm \1\xC0\202\25 186,SB
+RCL rm16,unity \320\1\xD1\202 8086
+RCL rm16,reg_cl \320\1\xD3\202 8086
+RCL rm16,imm \320\1\xC1\202\25 186,SB
+RCL rm32,unity \321\1\xD1\202 386
+RCL rm32,reg_cl \321\1\xD3\202 386
+RCL rm32,imm \321\1\xC1\202\25 386,SB
+RCL rm64,unity \324\1\xD1\202 X64
+RCL rm64,reg_cl \324\1\xD3\202 X64
+RCL rm64,imm \324\1\xC1\202\25 X64,SB
+RCR rm8,unity \1\xD0\203 8086
+RCR rm8,reg_cl \1\xD2\203 8086
+RCR rm8,imm \1\xC0\203\25 186,SB
+RCR rm16,unity \320\1\xD1\203 8086
+RCR rm16,reg_cl \320\1\xD3\203 8086
+RCR rm16,imm \320\1\xC1\203\25 186,SB
+RCR rm32,unity \321\1\xD1\203 386
+RCR rm32,reg_cl \321\1\xD3\203 386
+RCR rm32,imm \321\1\xC1\203\25 386,SB
+RCR rm64,unity \324\1\xD1\203 X64
+RCR rm64,reg_cl \324\1\xD3\203 X64
+RCR rm64,imm \324\1\xC1\203\25 X64,SB
+RDSHR rm32 \321\2\x0F\x36\200 P6,CYRIX,SMM
+RDMSR void \2\x0F\x32 PENT,PRIV
+RDPMC void \2\x0F\x33 P6
+RDTSC void \2\x0F\x31 PENT
+RDTSCP void \3\x0F\x01\xF9 X86_64
+RET void \1\xC3 8086
+RET imm \1\xC2\30 8086,SW
+RETF void \1\xCB 8086
+RETF imm \1\xCA\30 8086,SW
+RETN void \1\xC3 8086
+RETN imm \1\xC2\30 8086,SW
+ROL rm8,unity \1\xD0\200 8086
+ROL rm8,reg_cl \1\xD2\200 8086
+ROL rm8,imm \1\xC0\200\25 186,SB
+ROL rm16,unity \320\1\xD1\200 8086
+ROL rm16,reg_cl \320\1\xD3\200 8086
+ROL rm16,imm \320\1\xC1\200\25 186,SB
+ROL rm32,unity \321\1\xD1\200 386
+ROL rm32,reg_cl \321\1\xD3\200 386
+ROL rm32,imm \321\1\xC1\200\25 386,SB
+ROL rm64,unity \324\1\xD1\200 X64
+ROL rm64,reg_cl \324\1\xD3\200 X64
+ROL rm64,imm \324\1\xC1\200\25 X64,SB
+ROR rm8,unity \1\xD0\201 8086
+ROR rm8,reg_cl \1\xD2\201 8086
+ROR rm8,imm \1\xC0\201\25 186,SB
+ROR rm16,unity \320\1\xD1\201 8086
+ROR rm16,reg_cl \320\1\xD3\201 8086
+ROR rm16,imm \320\1\xC1\201\25 186,SB
+ROR rm32,unity \321\1\xD1\201 386
+ROR rm32,reg_cl \321\1\xD3\201 386
+ROR rm32,imm \321\1\xC1\201\25 386,SB
+ROR rm64,unity \324\1\xD1\201 X64
+ROR rm64,reg_cl \324\1\xD3\201 X64
+ROR rm64,imm \324\1\xC1\201\25 X64,SB
+RDM void \2\x0F\x3A P6,CYRIX,ND
+RSDC reg_sreg,mem80 \2\x0F\x79\110 486,CYRIX,SMM
+RSLDT mem80 \2\x0F\x7B\200 486,CYRIX,SMM
+RSM void \2\x0F\xAA PENT,SMM
+RSTS mem80 \2\x0F\x7D\200 486,CYRIX,SMM
+SAHF void \1\x9E 8086
+SAL rm8,unity \1\xD0\204 8086,ND
+SAL rm8,reg_cl \1\xD2\204 8086,ND
+SAL rm8,imm \1\xC0\204\25 186,ND,SB
+SAL rm16,unity \320\1\xD1\204 8086,ND
+SAL rm16,reg_cl \320\1\xD3\204 8086,ND
+SAL rm16,imm \320\1\xC1\204\25 186,ND,SB
+SAL rm32,unity \321\1\xD1\204 386,ND
+SAL rm32,reg_cl \321\1\xD3\204 386,ND
+SAL rm32,imm \321\1\xC1\204\25 386,ND,SB
+SAL rm64,unity \324\1\xD1\204 X64,ND
+SAL rm64,reg_cl \324\1\xD3\204 X64,ND
+SAL rm64,imm \324\1\xC1\204\25 X64,ND,SB
+SALC void \1\xD6 8086,UNDOC
+SAR rm8,unity \1\xD0\207 8086
+SAR rm8,reg_cl \1\xD2\207 8086
+SAR rm8,imm \1\xC0\207\25 186,SB
+SAR rm16,unity \320\1\xD1\207 8086
+SAR rm16,reg_cl \320\1\xD3\207 8086
+SAR rm16,imm \320\1\xC1\207\25 186,SB
+SAR rm32,unity \321\1\xD1\207 386
+SAR rm32,reg_cl \321\1\xD3\207 386
+SAR rm32,imm \321\1\xC1\207\25 386,SB
+SAR rm64,unity \324\1\xD1\207 X64
+SAR rm64,reg_cl \324\1\xD3\207 X64
+SAR rm64,imm \324\1\xC1\207\25 X64,SB
+SBB mem,reg8 \1\x18\101 8086,SM
+SBB reg8,reg8 \1\x18\101 8086
+SBB mem,reg16 \320\1\x19\101 8086,SM
+SBB reg16,reg16 \320\1\x19\101 8086
+SBB mem,reg32 \321\1\x19\101 386,SM
+SBB reg32,reg32 \321\1\x19\101 386
+SBB mem,reg64 \324\1\x19\101 X64,SM
+SBB reg64,reg64 \324\1\x19\101 X64
+SBB reg8,mem \1\x1A\110 8086,SM
+SBB reg8,reg8 \1\x1A\110 8086
+SBB reg16,mem \320\1\x1B\110 8086,SM
+SBB reg16,reg16 \320\1\x1B\110 8086
+SBB reg32,mem \321\1\x1B\110 386,SM
+SBB reg32,reg32 \321\1\x1B\110 386
+SBB reg64,mem \324\1\x1B\110 X64,SM
+SBB reg64,reg64 \324\1\x1B\110 X64
+SBB rm16,imm8 \320\1\x83\203\275 8086
+SBB rm32,imm8 \321\1\x83\203\275 386
+SBB rm64,imm8 \324\1\x83\203\275 X64
+SBB reg_al,imm \1\x1C\21 8086,SM
+SBB reg_ax,sbyte16 \320\1\x83\203\275 8086,SM
+SBB reg_ax,imm \320\1\x1D\31 8086,SM
+SBB reg_eax,sbyte32 \321\1\x83\203\275 386,SM
+SBB reg_eax,imm \321\1\x1D\41 386,SM
+SBB reg_rax,sbyte64 \324\1\x83\203\275 X64,SM
+SBB reg_rax,imm \324\1\x1D\255 X64,SM
+SBB rm8,imm \1\x80\203\21 8086,SM
+SBB rm16,imm \320\145\x81\203\141 8086,SM
+SBB rm32,imm \321\155\x81\203\151 386,SM
+SBB rm64,imm \324\155\x81\203\251 X64,SM
+SBB mem,imm8 \1\x80\203\21 8086,SM
+SBB mem,imm16 \320\145\x81\203\141 8086,SM
+SBB mem,imm32 \321\155\x81\203\151 386,SM
+SCASB void \335\1\xAE 8086
+SCASD void \335\321\1\xAF 386
+SCASQ void \335\324\1\xAF X64
+SCASW void \335\320\1\xAF 8086
+SFENCE void \3\x0F\xAE\xF8 X64,AMD
+SGDT mem \2\x0F\x01\200 286
+SHL rm8,unity \1\xD0\204 8086
+SHL rm8,reg_cl \1\xD2\204 8086
+SHL rm8,imm \1\xC0\204\25 186,SB
+SHL rm16,unity \320\1\xD1\204 8086
+SHL rm16,reg_cl \320\1\xD3\204 8086
+SHL rm16,imm \320\1\xC1\204\25 186,SB
+SHL rm32,unity \321\1\xD1\204 386
+SHL rm32,reg_cl \321\1\xD3\204 386
+SHL rm32,imm \321\1\xC1\204\25 386,SB
+SHL rm64,unity \324\1\xD1\204 X64
+SHL rm64,reg_cl \324\1\xD3\204 X64
+SHL rm64,imm \324\1\xC1\204\25 X64,SB
+SHLD mem,reg16,imm \320\2\x0F\xA4\101\26 386,SM2,SB,AR2
+SHLD reg16,reg16,imm \320\2\x0F\xA4\101\26 386,SM2,SB,AR2
+SHLD mem,reg32,imm \321\2\x0F\xA4\101\26 386,SM2,SB,AR2
+SHLD reg32,reg32,imm \321\2\x0F\xA4\101\26 386,SM2,SB,AR2
+SHLD mem,reg64,imm \324\2\x0F\xA4\101\26 X64,SM2,SB,AR2
+SHLD reg64,reg64,imm \324\2\x0F\xA4\101\26 X64,SM2,SB,AR2
+SHLD mem,reg16,reg_cl \320\2\x0F\xA5\101 386,SM
+SHLD reg16,reg16,reg_cl \320\2\x0F\xA5\101 386
+SHLD mem,reg32,reg_cl \321\2\x0F\xA5\101 386,SM
+SHLD reg32,reg32,reg_cl \321\2\x0F\xA5\101 386
+SHLD mem,reg64,reg_cl \324\2\x0F\xA5\101 X64,SM
+SHLD reg64,reg64,reg_cl \324\2\x0F\xA5\101 X64
+SHR rm8,unity \1\xD0\205 8086
+SHR rm8,reg_cl \1\xD2\205 8086
+SHR rm8,imm \1\xC0\205\25 186,SB
+SHR rm16,unity \320\1\xD1\205 8086
+SHR rm16,reg_cl \320\1\xD3\205 8086
+SHR rm16,imm \320\1\xC1\205\25 186,SB
+SHR rm32,unity \321\1\xD1\205 386
+SHR rm32,reg_cl \321\1\xD3\205 386
+SHR rm32,imm \321\1\xC1\205\25 386,SB
+SHR rm64,unity \324\1\xD1\205 X64
+SHR rm64,reg_cl \324\1\xD3\205 X64
+SHR rm64,imm \324\1\xC1\205\25 X64,SB
+SHRD mem,reg16,imm \320\2\x0F\xAC\101\26 386,SM2,SB,AR2
+SHRD reg16,reg16,imm \320\2\x0F\xAC\101\26 386,SM2,SB,AR2
+SHRD mem,reg32,imm \321\2\x0F\xAC\101\26 386,SM2,SB,AR2
+SHRD reg32,reg32,imm \321\2\x0F\xAC\101\26 386,SM2,SB,AR2
+SHRD mem,reg64,imm \324\2\x0F\xAC\101\26 X64,SM2,SB,AR2
+SHRD reg64,reg64,imm \324\2\x0F\xAC\101\26 X64,SM2,SB,AR2
+SHRD mem,reg16,reg_cl \320\2\x0F\xAD\101 386,SM
+SHRD reg16,reg16,reg_cl \320\2\x0F\xAD\101 386
+SHRD mem,reg32,reg_cl \321\2\x0F\xAD\101 386,SM
+SHRD reg32,reg32,reg_cl \321\2\x0F\xAD\101 386
+SHRD mem,reg64,reg_cl \324\2\x0F\xAD\101 X64,SM
+SHRD reg64,reg64,reg_cl \324\2\x0F\xAD\101 X64
+SIDT mem \2\x0F\x01\201 286
+SLDT mem \2\x0F\x00\200 286
+SLDT mem16 \2\x0F\x00\200 286
+SLDT reg16 \320\2\x0F\x00\200 286
+SLDT reg32 \321\2\x0F\x00\200 386
+SLDT reg64 \323\2\x0F\x00\200 X64,ND
+SLDT reg64 \324\2\x0F\x00\200 X64
+SKINIT void \3\x0F\x01\xDE X64
+SMI void \1\xF1 386,UNDOC
+SMINT void \2\x0F\x38 P6,CYRIX,ND
+; Older Cyrix chips had SMINT at this encoding; it had to move
+; due to a conflict with MMX.
+SMINTOLD void \2\x0F\x7E 486,CYRIX,ND
+SMSW mem \2\x0F\x01\204 286
+SMSW mem16 \2\x0F\x01\204 286
+SMSW reg16 \320\2\x0F\x01\204 286
+SMSW reg32 \321\2\x0F\x01\204 386
+STC void \1\xF9 8086
+STD void \1\xFD 8086
+STGI void \3\x0F\x01\xDC X64
+STI void \1\xFB 8086
+STOSB void \1\xAA 8086
+STOSD void \321\1\xAB 386
+STOSQ void \324\1\xAB X64
+STOSW void \320\1\xAB 8086
+STR mem \2\x0F\x00\201 286,PROT
+STR mem16 \2\x0F\x00\201 286,PROT
+STR reg16 \320\2\x0F\x00\201 286,PROT
+STR reg32 \321\2\x0F\x00\201 386,PROT
+STR reg64 \324\2\x0F\x00\201 X64
+SUB mem,reg8 \1\x28\101 8086,SM
+SUB reg8,reg8 \1\x28\101 8086
+SUB mem,reg16 \320\1\x29\101 8086,SM
+SUB reg16,reg16 \320\1\x29\101 8086
+SUB mem,reg32 \321\1\x29\101 386,SM
+SUB reg32,reg32 \321\1\x29\101 386
+SUB mem,reg64 \324\1\x29\101 X64,SM
+SUB reg64,reg64 \324\1\x29\101 X64
+SUB reg8,mem \1\x2A\110 8086,SM
+SUB reg8,reg8 \1\x2A\110 8086
+SUB reg16,mem \320\1\x2B\110 8086,SM
+SUB reg16,reg16 \320\1\x2B\110 8086
+SUB reg32,mem \321\1\x2B\110 386,SM
+SUB reg32,reg32 \321\1\x2B\110 386
+SUB reg64,mem \324\1\x2B\110 X64,SM
+SUB reg64,reg64 \324\1\x2B\110 X64
+SUB rm16,imm8 \320\1\x83\205\275 8086
+SUB rm32,imm8 \321\1\x83\205\275 386
+SUB rm64,imm8 \324\1\x83\205\275 X64
+SUB reg_al,imm \1\x2C\21 8086,SM
+SUB reg_ax,sbyte16 \320\1\x83\205\275 8086,SM
+SUB reg_ax,imm \320\1\x2D\31 8086,SM
+SUB reg_eax,sbyte32 \321\1\x83\205\275 386,SM
+SUB reg_eax,imm \321\1\x2D\41 386,SM
+SUB reg_rax,sbyte64 \324\1\x83\205\275 X64,SM
+SUB reg_rax,imm \324\1\x2D\255 X64,SM
+SUB rm8,imm \1\x80\205\21 8086,SM
+SUB rm16,imm \320\145\x81\205\141 8086,SM
+SUB rm32,imm \321\155\x81\205\151 386,SM
+SUB rm64,imm \324\155\x81\205\251 X64,SM
+SUB mem,imm8 \1\x80\205\21 8086,SM
+SUB mem,imm16 \320\145\x81\205\141 8086,SM
+SUB mem,imm32 \321\155\x81\205\151 386,SM
+SVDC mem80,reg_sreg \2\x0F\x78\101 486,CYRIX,SMM
+SVLDT mem80 \2\x0F\x7A\200 486,CYRIX,SMM,ND
+SVTS mem80 \2\x0F\x7C\200 486,CYRIX,SMM
+SWAPGS void \3\x0F\x01\xF8 X64
+SYSCALL void \2\x0F\x05 P6,AMD
+SYSENTER void \2\x0F\x34 P6
+SYSEXIT void \2\x0F\x35 P6,PRIV
+SYSRET void \2\x0F\x07 P6,PRIV,AMD
+TEST mem,reg8 \1\x84\101 8086,SM
+TEST reg8,reg8 \1\x84\101 8086
+TEST mem,reg16 \320\1\x85\101 8086,SM
+TEST reg16,reg16 \320\1\x85\101 8086
+TEST mem,reg32 \321\1\x85\101 386,SM
+TEST reg32,reg32 \321\1\x85\101 386
+TEST mem,reg64 \324\1\x85\101 X64,SM
+TEST reg64,reg64 \324\1\x85\101 X64
+TEST reg8,mem \1\x84\110 8086,SM
+TEST reg16,mem \320\1\x85\110 8086,SM
+TEST reg32,mem \321\1\x85\110 386,SM
+TEST reg64,mem \324\1\x85\110 X64,SM
+TEST reg_al,imm \1\xA8\21 8086,SM
+TEST reg_ax,imm \320\1\xA9\31 8086,SM
+TEST reg_eax,imm \321\1\xA9\41 386,SM
+TEST reg_rax,imm \324\1\xA9\255 X64,SM
+TEST rm8,imm \1\xF6\200\21 8086,SM
+TEST rm16,imm \320\1\xF7\200\31 8086,SM
+TEST rm32,imm \321\1\xF7\200\41 386,SM
+TEST rm64,imm \324\1\xF7\200\255 X64,SM
+TEST mem,imm8 \1\xF6\200\21 8086,SM
+TEST mem,imm16 \320\1\xF7\200\31 8086,SM
+TEST mem,imm32 \321\1\xF7\200\41 386,SM
+UD0 void \2\x0F\xFF 186,UNDOC
+UD1 void \2\x0F\xB9 186,UNDOC
+UD2B void \2\x0F\xB9 186,UNDOC,ND
+UD2 void \2\x0F\x0B 186
+UD2A void \2\x0F\x0B 186,ND
+UMOV mem,reg8 \360\2\x0F\x10\101 386,UNDOC,SM,ND
+UMOV reg8,reg8 \360\2\x0F\x10\101 386,UNDOC,ND
+UMOV mem,reg16 \360\320\2\x0F\x11\101 386,UNDOC,SM,ND
+UMOV reg16,reg16 \360\320\2\x0F\x11\101 386,UNDOC,ND
+UMOV mem,reg32 \360\321\2\x0F\x11\101 386,UNDOC,SM,ND
+UMOV reg32,reg32 \360\321\2\x0F\x11\101 386,UNDOC,ND
+UMOV reg8,mem \360\2\x0F\x12\110 386,UNDOC,SM,ND
+UMOV reg8,reg8 \360\2\x0F\x12\110 386,UNDOC,ND
+UMOV reg16,mem \360\320\2\x0F\x13\110 386,UNDOC,SM,ND
+UMOV reg16,reg16 \360\320\2\x0F\x13\110 386,UNDOC,ND
+UMOV reg32,mem \360\321\2\x0F\x13\110 386,UNDOC,SM,ND
+UMOV reg32,reg32 \360\321\2\x0F\x13\110 386,UNDOC,ND
+VERR mem \2\x0F\x00\204 286,PROT
+VERR mem16 \2\x0F\x00\204 286,PROT
+VERR reg16 \2\x0F\x00\204 286,PROT
+VERW mem \2\x0F\x00\205 286,PROT
+VERW mem16 \2\x0F\x00\205 286,PROT
+VERW reg16 \2\x0F\x00\205 286,PROT
+FWAIT void \341 8086
+WBINVD void \2\x0F\x09 486,PRIV
+WRSHR rm32 \321\2\x0F\x37\200 P6,CYRIX,SMM
+WRMSR void \2\x0F\x30 PENT,PRIV
+XADD mem,reg8 \2\x0F\xC0\101 486,SM
+XADD reg8,reg8 \2\x0F\xC0\101 486
+XADD mem,reg16 \320\2\x0F\xC1\101 486,SM
+XADD reg16,reg16 \320\2\x0F\xC1\101 486
+XADD mem,reg32 \321\2\x0F\xC1\101 486,SM
+XADD reg32,reg32 \321\2\x0F\xC1\101 486
+XADD mem,reg64 \324\2\x0F\xC1\101 X64,SM
+XADD reg64,reg64 \324\2\x0F\xC1\101 X64
+XBTS reg16,mem \320\2\x0F\xA6\110 386,SW,UNDOC,ND
+XBTS reg16,reg16 \320\2\x0F\xA6\110 386,UNDOC,ND
+XBTS reg32,mem \321\2\x0F\xA6\110 386,SD,UNDOC,ND
+XBTS reg32,reg32 \321\2\x0F\xA6\110 386,UNDOC,ND
+XCHG reg_ax,reg16 \320\11\x90 8086
+XCHG reg_eax,reg32na \321\11\x90 386
+XCHG reg_rax,reg64 \324\11\x90 X64
+XCHG reg16,reg_ax \320\10\x90 8086
+XCHG reg32na,reg_eax \321\10\x90 386
+XCHG reg64,reg_rax \324\10\x90 X64
+; This must be NOLONG since opcode 90 is NOP, and in 64-bit mode
+; "xchg eax,eax" is *not* a NOP: writing a 32-bit register
+; zero-extends it into the full 64-bit register.
+XCHG reg_eax,reg_eax \321\1\x90 386,NOLONG
+XCHG reg8,mem \1\x86\110 8086,SM
+XCHG reg8,reg8 \1\x86\110 8086
+XCHG reg16,mem \320\1\x87\110 8086,SM
+XCHG reg16,reg16 \320\1\x87\110 8086
+XCHG reg32,mem \321\1\x87\110 386,SM
+XCHG reg32,reg32 \321\1\x87\110 386
+XCHG reg64,mem \324\1\x87\110 X64,SM
+XCHG reg64,reg64 \324\1\x87\110 X64
+XCHG mem,reg8 \1\x86\101 8086,SM
+XCHG reg8,reg8 \1\x86\101 8086
+XCHG mem,reg16 \320\1\x87\101 8086,SM
+XCHG reg16,reg16 \320\1\x87\101 8086
+XCHG mem,reg32 \321\1\x87\101 386,SM
+XCHG reg32,reg32 \321\1\x87\101 386
+XCHG mem,reg64 \324\1\x87\101 X64,SM
+XCHG reg64,reg64 \324\1\x87\101 X64
+XLATB void \1\xD7 8086
+XLAT void \1\xD7 8086
+XOR mem,reg8 \1\x30\101 8086,SM
+XOR reg8,reg8 \1\x30\101 8086
+XOR mem,reg16 \320\1\x31\101 8086,SM
+XOR reg16,reg16 \320\1\x31\101 8086
+XOR mem,reg32 \321\1\x31\101 386,SM
+XOR reg32,reg32 \321\1\x31\101 386
+XOR mem,reg64 \324\1\x31\101 X64,SM
+XOR reg64,reg64 \324\1\x31\101 X64
+XOR reg8,mem \1\x32\110 8086,SM
+XOR reg8,reg8 \1\x32\110 8086
+XOR reg16,mem \320\1\x33\110 8086,SM
+XOR reg16,reg16 \320\1\x33\110 8086
+XOR reg32,mem \321\1\x33\110 386,SM
+XOR reg32,reg32 \321\1\x33\110 386
+XOR reg64,mem \324\1\x33\110 X64,SM
+XOR reg64,reg64 \324\1\x33\110 X64
+XOR rm16,imm8 \320\1\x83\206\275 8086
+XOR rm32,imm8 \321\1\x83\206\275 386
+XOR rm64,imm8 \324\1\x83\206\275 X64
+XOR reg_al,imm \1\x34\21 8086,SM
+XOR reg_ax,sbyte16 \320\1\x83\206\275 8086,SM
+XOR reg_ax,imm \320\1\x35\31 8086,SM
+XOR reg_eax,sbyte32 \321\1\x83\206\275 386,SM
+XOR reg_eax,imm \321\1\x35\41 386,SM
+XOR reg_rax,sbyte64 \324\1\x83\206\275 X64,SM
+XOR reg_rax,imm \324\1\x35\255 X64,SM
+XOR rm8,imm \1\x80\206\21 8086,SM
+XOR rm16,imm \320\145\x81\206\141 8086,SM
+XOR rm32,imm \321\155\x81\206\151 386,SM
+XOR rm64,imm \324\155\x81\206\251 X64,SM
+XOR mem,imm8 \1\x80\206\21 8086,SM
+XOR mem,imm16 \320\145\x81\206\141 8086,SM
+XOR mem,imm32 \321\155\x81\206\151 386,SM
+CMOVcc reg16,mem \320\1\x0F\330\x40\110 P6,SM
+CMOVcc reg16,reg16 \320\1\x0F\330\x40\110 P6
+CMOVcc reg32,mem \321\1\x0F\330\x40\110 P6,SM
+CMOVcc reg32,reg32 \321\1\x0F\330\x40\110 P6
+CMOVcc reg64,mem \324\1\x0F\330\x40\110 X64,SM
+CMOVcc reg64,reg64 \324\1\x0F\330\x40\110 X64
+Jcc imm|near \322\1\x0F\330\x80\64 386
+Jcc imm16|near \320\1\x0F\330\x80\64 386
+Jcc imm32|near \321\1\x0F\330\x80\64 386
+Jcc imm|short \330\x70\50 8086,ND
+Jcc imm \370\330\x70\50 8086,ND
+Jcc imm \1\x0F\330\x80\64 386,ND
+Jcc imm \330\x71\373\1\xE9\64 8086,ND
+Jcc imm \330\x70\50 8086
+SETcc mem \1\x0F\330\x90\200 386,SB
+SETcc reg8 \1\x0F\330\x90\200 386
+
+;# Katmai Streaming SIMD instructions (SSE -- a.k.a. KNI, XMM, MMX2)
+ADDPS xmmreg,xmmrm \360\2\x0F\x58\110 KATMAI,SSE
+ADDSS xmmreg,xmmrm \363\2\x0F\x58\110 KATMAI,SSE,SD
+ANDNPS xmmreg,xmmrm \360\2\x0F\x55\110 KATMAI,SSE
+ANDPS xmmreg,xmmrm \360\2\x0F\x54\110 KATMAI,SSE
+CMPEQPS xmmreg,xmmrm \360\2\x0F\xC2\110\1\x00 KATMAI,SSE
+CMPEQSS xmmreg,xmmrm \363\2\x0F\xC2\110\1\x00 KATMAI,SSE
+CMPLEPS xmmreg,xmmrm \360\2\x0F\xC2\110\1\x02 KATMAI,SSE
+CMPLESS xmmreg,xmmrm \363\2\x0F\xC2\110\1\x02 KATMAI,SSE
+CMPLTPS xmmreg,xmmrm \360\2\x0F\xC2\110\1\x01 KATMAI,SSE
+CMPLTSS xmmreg,xmmrm \363\2\x0F\xC2\110\1\x01 KATMAI,SSE
+CMPNEQPS xmmreg,xmmrm \360\2\x0F\xC2\110\1\x04 KATMAI,SSE
+CMPNEQSS xmmreg,xmmrm \363\2\x0F\xC2\110\1\x04 KATMAI,SSE
+CMPNLEPS xmmreg,xmmrm \360\2\x0F\xC2\110\1\x06 KATMAI,SSE
+CMPNLESS xmmreg,xmmrm \363\2\x0F\xC2\110\1\x06 KATMAI,SSE
+CMPNLTPS xmmreg,xmmrm \360\2\x0F\xC2\110\1\x05 KATMAI,SSE
+CMPNLTSS xmmreg,xmmrm \363\2\x0F\xC2\110\1\x05 KATMAI,SSE
+CMPORDPS xmmreg,xmmrm \360\2\x0F\xC2\110\1\x07 KATMAI,SSE
+CMPORDSS xmmreg,xmmrm \363\2\x0F\xC2\110\1\x07 KATMAI,SSE
+CMPUNORDPS xmmreg,xmmrm \360\2\x0F\xC2\110\1\x03 KATMAI,SSE
+CMPUNORDSS xmmreg,xmmrm \363\2\x0F\xC2\110\1\x03 KATMAI,SSE
+; CMPPS/CMPSS must come after the specific ops; that way the disassembler will find the
+; specific ops first and only disassemble illegal ones as cmpps/cmpss.
+CMPPS xmmreg,mem,imm \360\2\x0F\xC2\110\26 KATMAI,SSE,SB,AR2
+CMPPS xmmreg,xmmreg,imm \360\2\x0F\xC2\110\26 KATMAI,SSE,SB,AR2
+CMPSS xmmreg,mem,imm \363\2\x0F\xC2\110\26 KATMAI,SSE,SB,AR2
+CMPSS xmmreg,xmmreg,imm \363\2\x0F\xC2\110\26 KATMAI,SSE,SB,AR2
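+; Illustrative example (comment only): the specific and generic spellings pick
+; the same encoding, and disassembly prefers the specific mnemonic.
+;     cmpleps xmm0,xmm1       ; 0F C2 C1 02
+;     cmpps   xmm0,xmm1,2     ; same bytes, generic form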
+COMISS xmmreg,xmmrm \360\2\x0F\x2F\110 KATMAI,SSE
+CVTPI2PS xmmreg,mmxrm \360\2\x0F\x2A\110 KATMAI,SSE,MMX,SQ
+CVTPS2PI mmxreg,xmmrm \360\2\x0F\x2D\110 KATMAI,SSE,MMX,SQ
+CVTSI2SS xmmreg,mem \363\2\x0F\x2A\110 KATMAI,SSE,SD,AR1,ND
+CVTSI2SS xmmreg,rm32 \363\2\x0F\x2A\110 KATMAI,SSE,SD,AR1
+CVTSI2SS xmmreg,rm64 \324\363\2\x0F\x2A\110 X64,SSE,SQ,AR1
+CVTSS2SI reg32,xmmreg \363\2\x0F\x2D\110 KATMAI,SSE,SD,AR1
+CVTSS2SI reg32,mem \363\2\x0F\x2D\110 KATMAI,SSE,SD,AR1
+CVTSS2SI reg64,xmmreg \324\363\2\x0F\x2D\110 X64,SSE,SD,AR1
+CVTSS2SI reg64,mem \324\363\2\x0F\x2D\110 X64,SSE,SD,AR1
+CVTTPS2PI mmxreg,xmmrm \360\2\x0F\x2C\110 KATMAI,SSE,MMX,SQ
+CVTTSS2SI reg32,xmmrm \363\2\x0F\x2C\110 KATMAI,SSE,SD,AR1
+CVTTSS2SI reg64,xmmrm \324\363\2\x0F\x2C\110 X64,SSE,SD,AR1
+DIVPS xmmreg,xmmrm \360\2\x0F\x5E\110 KATMAI,SSE
+DIVSS xmmreg,xmmrm \363\2\x0F\x5E\110 KATMAI,SSE
+LDMXCSR mem \2\x0F\xAE\202 KATMAI,SSE,SD
+MAXPS xmmreg,xmmrm \360\2\x0F\x5F\110 KATMAI,SSE
+MAXSS xmmreg,xmmrm \363\2\x0F\x5F\110 KATMAI,SSE
+MINPS xmmreg,xmmrm \360\2\x0F\x5D\110 KATMAI,SSE
+MINSS xmmreg,xmmrm \363\2\x0F\x5D\110 KATMAI,SSE
+MOVAPS xmmreg,mem \360\2\x0F\x28\110 KATMAI,SSE
+MOVAPS mem,xmmreg \360\2\x0F\x29\101 KATMAI,SSE
+MOVAPS xmmreg,xmmreg \360\2\x0F\x28\110 KATMAI,SSE
+MOVAPS xmmreg,xmmreg \360\2\x0F\x29\101 KATMAI,SSE
+MOVHPS xmmreg,mem \360\2\x0F\x16\110 KATMAI,SSE
+MOVHPS mem,xmmreg \360\2\x0F\x17\101 KATMAI,SSE
+MOVLHPS xmmreg,xmmreg \360\2\x0F\x16\110 KATMAI,SSE
+MOVLPS xmmreg,mem \360\2\x0F\x12\110 KATMAI,SSE
+MOVLPS mem,xmmreg \360\2\x0F\x13\101 KATMAI,SSE
+MOVHLPS xmmreg,xmmreg \360\2\x0F\x12\110 KATMAI,SSE
+MOVMSKPS reg32,xmmreg \360\2\x0F\x50\110 KATMAI,SSE
+MOVMSKPS reg64,xmmreg \360\324\2\x0F\x50\110 X64,SSE
+MOVNTPS mem,xmmreg \360\2\x0F\x2B\101 KATMAI,SSE
+MOVSS xmmreg,mem \363\2\x0F\x10\110 KATMAI,SSE
+MOVSS mem,xmmreg \363\2\x0F\x11\101 KATMAI,SSE
+MOVSS xmmreg,xmmreg \363\2\x0F\x10\110 KATMAI,SSE
+MOVSS xmmreg,xmmreg \363\2\x0F\x11\101 KATMAI,SSE
+MOVUPS xmmreg,mem \360\2\x0F\x10\110 KATMAI,SSE
+MOVUPS mem,xmmreg \360\2\x0F\x11\101 KATMAI,SSE
+MOVUPS xmmreg,xmmreg \360\2\x0F\x10\110 KATMAI,SSE
+MOVUPS xmmreg,xmmreg \360\2\x0F\x11\101 KATMAI,SSE
+MULPS xmmreg,xmmrm \360\2\x0F\x59\110 KATMAI,SSE
+MULSS xmmreg,xmmrm \363\2\x0F\x59\110 KATMAI,SSE
+ORPS xmmreg,xmmrm \360\2\x0F\x56\110 KATMAI,SSE
+RCPPS xmmreg,xmmrm \360\2\x0F\x53\110 KATMAI,SSE
+RCPSS xmmreg,xmmrm \363\2\x0F\x53\110 KATMAI,SSE
+RSQRTPS xmmreg,xmmrm \360\2\x0F\x52\110 KATMAI,SSE
+RSQRTSS xmmreg,xmmrm \363\2\x0F\x52\110 KATMAI,SSE
+SHUFPS xmmreg,mem,imm \360\2\x0F\xC6\110\26 KATMAI,SSE,SB,AR2
+SHUFPS xmmreg,xmmreg,imm \360\2\x0F\xC6\110\26 KATMAI,SSE,SB,AR2
+SQRTPS xmmreg,xmmrm \360\2\x0F\x51\110 KATMAI,SSE
+SQRTSS xmmreg,xmmrm \363\2\x0F\x51\110 KATMAI,SSE
+STMXCSR mem \2\x0F\xAE\203 KATMAI,SSE,SD
+SUBPS xmmreg,xmmrm \360\2\x0F\x5C\110 KATMAI,SSE
+SUBSS xmmreg,xmmrm \363\2\x0F\x5C\110 KATMAI,SSE
+UCOMISS xmmreg,xmmrm \360\2\x0F\x2E\110 KATMAI,SSE
+UNPCKHPS xmmreg,xmmrm \360\2\x0F\x15\110 KATMAI,SSE
+UNPCKLPS xmmreg,xmmrm \360\2\x0F\x14\110 KATMAI,SSE
+XORPS xmmreg,xmmrm \360\2\x0F\x57\110 KATMAI,SSE
+
+;# Introduced in Deschutes but necessary for SSE support
+FXRSTOR mem [m: 0f ae /1] P6,SSE,FPU
+FXRSTOR64 mem [m: o64 0f ae /1] X64,SSE,FPU
+FXSAVE mem [m: 0f ae /0] P6,SSE,FPU
+FXSAVE64 mem [m: o64 0f ae /0] X64,SSE,FPU
+
+;# XSAVE group (AVX and extended state)
+; Introduced in late Penryn ... we really need to clean up the handling
+; of CPU feature bits.
+XGETBV void \360\3\x0F\x01\xD0 NEHALEM
+XSETBV void \360\3\x0F\x01\xD1 NEHALEM,PRIV
+XSAVE mem [m: 0f ae /4] NEHALEM
+XSAVE64 mem [m: o64 0f ae /4] LONG,NEHALEM
+XSAVEOPT mem [m: 0f ae /6] FUTURE
+XSAVEOPT64 mem [m: o64 0f ae /6] LONG,FUTURE
+XRSTOR mem [m: 0f ae /5] NEHALEM
+XRSTOR64 mem [m: o64 0f ae /5] LONG,NEHALEM
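+; Rough usage sketch (comment only; register choices are illustrative):
+;     xor ecx,ecx         ; XCR0
+;     xgetbv              ; EDX:EAX = enabled state-component bitmap
+;     mov eax,3           ; request x87+SSE state only
+;     xor edx,edx
+;     xsave [rdi]         ; RDI -> 64-byte-aligned save area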
+
+; These instructions are not SSE-specific; they are
+;# Generic memory operations
+; and work even if CR4.OSFXSR == 0
+PREFETCHNTA mem \2\x0F\x18\200 KATMAI
+PREFETCHT0 mem \2\x0F\x18\201 KATMAI
+PREFETCHT1 mem \2\x0F\x18\202 KATMAI
+PREFETCHT2 mem \2\x0F\x18\203 KATMAI
+SFENCE void \3\x0F\xAE\xF8 KATMAI
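+; Usage sketch (comment only): these are plain hints/fences and may be used in
+; code that never touches XMM state, e.g.
+;     prefetchnta [esi]
+;     sfence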
+
+;# New MMX instructions introduced in Katmai
+MASKMOVQ mmxreg,mmxreg \360\2\x0F\xF7\110 KATMAI,MMX
+MOVNTQ mem,mmxreg \360\2\x0F\xE7\101 KATMAI,MMX,SQ
+PAVGB mmxreg,mmxrm \360\323\2\x0F\xE0\110 KATMAI,MMX,SQ
+PAVGW mmxreg,mmxrm \360\323\2\x0F\xE3\110 KATMAI,MMX,SQ
+PEXTRW reg32,mmxreg,imm \360\2\x0F\xC5\110\26 KATMAI,MMX,SB,AR2
+; PINSRW is documented as using a reg32, but it really uses only the low 16 bits
+; -- accept either, but be truthful in disassembly
+PINSRW mmxreg,mem,imm \360\2\x0F\xC4\110\26 KATMAI,MMX,SB,AR2
+PINSRW mmxreg,rm16,imm \360\2\x0F\xC4\110\26 KATMAI,MMX,SB,AR2
+PINSRW mmxreg,reg32,imm \360\2\x0F\xC4\110\26 KATMAI,MMX,SB,AR2
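+; Example (comment only): both spellings are accepted and assemble identically;
+; only the low word of the source register is inserted.
+;     pinsrw mm0,ax,1         ; 0F C4 C0 01
+;     pinsrw mm0,eax,1        ; same bytes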
+PMAXSW mmxreg,mmxrm \360\323\2\x0F\xEE\110 KATMAI,MMX,SQ
+PMAXUB mmxreg,mmxrm \360\323\2\x0F\xDE\110 KATMAI,MMX,SQ
+PMINSW mmxreg,mmxrm \360\323\2\x0F\xEA\110 KATMAI,MMX,SQ
+PMINUB mmxreg,mmxrm \360\323\2\x0F\xDA\110 KATMAI,MMX,SQ
+PMOVMSKB reg32,mmxreg \360\2\x0F\xD7\110 KATMAI,MMX
+PMULHUW mmxreg,mmxrm \360\323\2\x0F\xE4\110 KATMAI,MMX,SQ
+PSADBW mmxreg,mmxrm \360\323\2\x0F\xF6\110 KATMAI,MMX,SQ
+PSHUFW mmxreg,mmxrm,imm \360\323\2\x0F\x70\110\22 KATMAI,MMX,SM2,SB,AR2
+
+;# AMD Enhanced 3DNow! (Athlon) instructions
+PF2IW mmxreg,mmxrm \323\2\x0F\x0F\110\01\x1C PENT,3DNOW,SQ
+PFNACC mmxreg,mmxrm \323\2\x0F\x0F\110\01\x8A PENT,3DNOW,SQ
+PFPNACC mmxreg,mmxrm \323\2\x0F\x0F\110\01\x8E PENT,3DNOW,SQ
+PI2FW mmxreg,mmxrm \323\2\x0F\x0F\110\01\x0C PENT,3DNOW,SQ
+PSWAPD mmxreg,mmxrm \323\2\x0F\x0F\110\01\xBB PENT,3DNOW,SQ
+
+;# Willamette SSE2 Cacheability Instructions
+MASKMOVDQU xmmreg,xmmreg \361\2\x0F\xF7\110 WILLAMETTE,SSE2
+; CLFLUSH needs its own feature flag implemented one day
+CLFLUSH mem \2\x0F\xAE\207 WILLAMETTE,SSE2
+MOVNTDQ mem,xmmreg \361\2\x0F\xE7\101 WILLAMETTE,SSE2,SO
+MOVNTI mem,reg32 \360\2\x0F\xC3\101 WILLAMETTE,SD
+MOVNTI mem,reg64 \324\360\2\x0F\xC3\101 X64,SQ
+MOVNTPD mem,xmmreg \361\2\x0F\x2B\101 WILLAMETTE,SSE2,SO
+LFENCE void \3\x0F\xAE\xE8 WILLAMETTE,SSE2
+MFENCE void \3\x0F\xAE\xF0 WILLAMETTE,SSE2
+
+;# Willamette MMX instructions (SSE2 SIMD Integer Instructions)
+MOVD mem,xmmreg \361\317\2\x0F\x7E\101 WILLAMETTE,SSE2,SD
+MOVD xmmreg,mem \361\317\2\x0F\x6E\110 WILLAMETTE,SSE2,SD
+MOVD xmmreg,rm32 \361\317\2\x0F\x6E\110 WILLAMETTE,SSE2
+MOVD rm32,xmmreg \361\317\2\x0F\x7E\101 WILLAMETTE,SSE2
+MOVDQA xmmreg,xmmreg \361\2\x0F\x6F\110 WILLAMETTE,SSE2
+MOVDQA mem,xmmreg \361\2\x0F\x7F\101 WILLAMETTE,SSE2,SO
+MOVDQA xmmreg,mem \361\2\x0F\x6F\110 WILLAMETTE,SSE2,SO
+MOVDQA xmmreg,xmmreg \361\2\x0F\x7F\101 WILLAMETTE,SSE2
+MOVDQU xmmreg,xmmreg \363\2\x0F\x6F\110 WILLAMETTE,SSE2
+MOVDQU mem,xmmreg \363\2\x0F\x7F\101 WILLAMETTE,SSE2,SO
+MOVDQU xmmreg,mem \363\2\x0F\x6F\110 WILLAMETTE,SSE2,SO
+MOVDQU xmmreg,xmmreg \363\2\x0F\x7F\101 WILLAMETTE,SSE2
+MOVDQ2Q mmxreg,xmmreg \362\2\x0F\xD6\110 WILLAMETTE,SSE2
+MOVQ xmmreg,xmmreg \363\2\x0F\x7E\110 WILLAMETTE,SSE2
+MOVQ xmmreg,xmmreg \361\2\x0F\xD6\101 WILLAMETTE,SSE2
+MOVQ mem,xmmreg \361\2\x0F\xD6\101 WILLAMETTE,SSE2,SQ
+MOVQ xmmreg,mem \363\2\x0F\x7E\110 WILLAMETTE,SSE2,SQ
+MOVQ xmmreg,rm64 \361\324\2\x0F\x6E\110 X64,SSE2
+MOVQ rm64,xmmreg \361\324\2\x0F\x7E\101 X64,SSE2
+MOVQ2DQ xmmreg,mmxreg \363\2\x0F\xD6\110 WILLAMETTE,SSE2
+PACKSSWB xmmreg,xmmrm \361\2\x0F\x63\110 WILLAMETTE,SSE2,SO
+PACKSSDW xmmreg,xmmrm \361\2\x0F\x6B\110 WILLAMETTE,SSE2,SO
+PACKUSWB xmmreg,xmmrm \361\2\x0F\x67\110 WILLAMETTE,SSE2,SO
+PADDB xmmreg,xmmrm \361\2\x0F\xFC\110 WILLAMETTE,SSE2,SO
+PADDW xmmreg,xmmrm \361\2\x0F\xFD\110 WILLAMETTE,SSE2,SO
+PADDD xmmreg,xmmrm \361\2\x0F\xFE\110 WILLAMETTE,SSE2,SO
+PADDQ mmxreg,mmxrm \360\2\x0F\xD4\110 WILLAMETTE,MMX,SQ
+PADDQ xmmreg,xmmrm \361\2\x0F\xD4\110 WILLAMETTE,SSE2,SO
+PADDSB xmmreg,xmmrm \361\2\x0F\xEC\110 WILLAMETTE,SSE2,SO
+PADDSW xmmreg,xmmrm \361\2\x0F\xED\110 WILLAMETTE,SSE2,SO
+PADDUSB xmmreg,xmmrm \361\2\x0F\xDC\110 WILLAMETTE,SSE2,SO
+PADDUSW xmmreg,xmmrm \361\2\x0F\xDD\110 WILLAMETTE,SSE2,SO
+PAND xmmreg,xmmrm \361\2\x0F\xDB\110 WILLAMETTE,SSE2,SO
+PANDN xmmreg,xmmrm \361\2\x0F\xDF\110 WILLAMETTE,SSE2,SO
+PAVGB xmmreg,xmmrm \361\2\x0F\xE0\110 WILLAMETTE,SSE2,SO
+PAVGW xmmreg,xmmrm \361\2\x0F\xE3\110 WILLAMETTE,SSE2,SO
+PCMPEQB xmmreg,xmmrm \361\2\x0F\x74\110 WILLAMETTE,SSE2,SO
+PCMPEQW xmmreg,xmmrm \361\2\x0F\x75\110 WILLAMETTE,SSE2,SO
+PCMPEQD xmmreg,xmmrm \361\2\x0F\x76\110 WILLAMETTE,SSE2,SO
+PCMPGTB xmmreg,xmmrm \361\2\x0F\x64\110 WILLAMETTE,SSE2,SO
+PCMPGTW xmmreg,xmmrm \361\2\x0F\x65\110 WILLAMETTE,SSE2,SO
+PCMPGTD xmmreg,xmmrm \361\2\x0F\x66\110 WILLAMETTE,SSE2,SO
+PEXTRW reg32,xmmreg,imm \361\2\x0F\xC5\110\26 WILLAMETTE,SSE2,SB,AR2
+PINSRW xmmreg,reg16,imm \361\2\x0F\xC4\110\26 WILLAMETTE,SSE2,SB,AR2
+PINSRW xmmreg,reg32,imm \361\2\x0F\xC4\110\26 WILLAMETTE,SSE2,SB,AR2,ND
+PINSRW xmmreg,mem,imm \361\2\x0F\xC4\110\26 WILLAMETTE,SSE2,SB,AR2
+PINSRW xmmreg,mem16,imm \361\2\x0F\xC4\110\26 WILLAMETTE,SSE2,SB,AR2
+PMADDWD xmmreg,xmmrm \361\2\x0F\xF5\110 WILLAMETTE,SSE2,SO
+PMAXSW xmmreg,xmmrm \361\2\x0F\xEE\110 WILLAMETTE,SSE2,SO
+PMAXUB xmmreg,xmmrm \361\2\x0F\xDE\110 WILLAMETTE,SSE2,SO
+PMINSW xmmreg,xmmrm \361\2\x0F\xEA\110 WILLAMETTE,SSE2,SO
+PMINUB xmmreg,xmmrm \361\2\x0F\xDA\110 WILLAMETTE,SSE2,SO
+PMOVMSKB reg32,xmmreg \361\2\x0F\xD7\110 WILLAMETTE,SSE2
+PMULHUW xmmreg,xmmrm \361\2\x0F\xE4\110 WILLAMETTE,SSE2,SO
+PMULHW xmmreg,xmmrm \361\2\x0F\xE5\110 WILLAMETTE,SSE2,SO
+PMULLW xmmreg,xmmrm \361\2\x0F\xD5\110 WILLAMETTE,SSE2,SO
+PMULUDQ mmxreg,mmxrm \360\323\2\x0F\xF4\110 WILLAMETTE,SSE2,SO
+PMULUDQ xmmreg,xmmrm \361\2\x0F\xF4\110 WILLAMETTE,SSE2,SO
+POR xmmreg,xmmrm \361\2\x0F\xEB\110 WILLAMETTE,SSE2,SO
+PSADBW xmmreg,xmmrm \361\2\x0F\xF6\110 WILLAMETTE,SSE2,SO
+PSHUFD xmmreg,xmmreg,imm \361\2\x0F\x70\110\22 WILLAMETTE,SSE2,SB,AR2
+PSHUFD xmmreg,mem,imm \361\2\x0F\x70\110\22 WILLAMETTE,SSE2,SM2,SB,AR2
+PSHUFHW xmmreg,xmmreg,imm \363\2\x0F\x70\110\22 WILLAMETTE,SSE2,SB,AR2
+PSHUFHW xmmreg,mem,imm \363\2\x0F\x70\110\22 WILLAMETTE,SSE2,SM2,SB,AR2
+PSHUFLW xmmreg,xmmreg,imm \362\2\x0F\x70\110\22 WILLAMETTE,SSE2,SB,AR2
+PSHUFLW xmmreg,mem,imm \362\2\x0F\x70\110\22 WILLAMETTE,SSE2,SM2,SB,AR2
+PSLLDQ xmmreg,imm \361\2\x0F\x73\207\25 WILLAMETTE,SSE2,SB,AR1
+PSLLW xmmreg,xmmrm \361\2\x0F\xF1\110 WILLAMETTE,SSE2,SO
+PSLLW xmmreg,imm \361\2\x0F\x71\206\25 WILLAMETTE,SSE2,SB,AR1
+PSLLD xmmreg,xmmrm \361\2\x0F\xF2\110 WILLAMETTE,SSE2,SO
+PSLLD xmmreg,imm \361\2\x0F\x72\206\25 WILLAMETTE,SSE2,SB,AR1
+PSLLQ xmmreg,xmmrm \361\2\x0F\xF3\110 WILLAMETTE,SSE2,SO
+PSLLQ xmmreg,imm \361\2\x0F\x73\206\25 WILLAMETTE,SSE2,SB,AR1
+PSRAW xmmreg,xmmrm \361\2\x0F\xE1\110 WILLAMETTE,SSE2,SO
+PSRAW xmmreg,imm \361\2\x0F\x71\204\25 WILLAMETTE,SSE2,SB,AR1
+PSRAD xmmreg,xmmrm \361\2\x0F\xE2\110 WILLAMETTE,SSE2,SO
+PSRAD xmmreg,imm \361\2\x0F\x72\204\25 WILLAMETTE,SSE2,SB,AR1
+PSRLDQ xmmreg,imm \361\2\x0F\x73\203\25 WILLAMETTE,SSE2,SB,AR1
+PSRLW xmmreg,xmmrm \361\2\x0F\xD1\110 WILLAMETTE,SSE2,SO
+PSRLW xmmreg,imm \361\2\x0F\x71\202\25 WILLAMETTE,SSE2,SB,AR1
+PSRLD xmmreg,xmmrm \361\2\x0F\xD2\110 WILLAMETTE,SSE2,SO
+PSRLD xmmreg,imm \361\2\x0F\x72\202\25 WILLAMETTE,SSE2,SB,AR1
+PSRLQ xmmreg,xmmrm \361\2\x0F\xD3\110 WILLAMETTE,SSE2,SO
+PSRLQ xmmreg,imm \361\2\x0F\x73\202\25 WILLAMETTE,SSE2,SB,AR1
+PSUBB xmmreg,xmmrm \361\2\x0F\xF8\110 WILLAMETTE,SSE2,SO
+PSUBW xmmreg,xmmrm \361\2\x0F\xF9\110 WILLAMETTE,SSE2,SO
+PSUBD xmmreg,xmmrm \361\2\x0F\xFA\110 WILLAMETTE,SSE2,SO
+PSUBQ mmxreg,mmxrm \360\323\2\x0F\xFB\110 WILLAMETTE,SSE2,SO
+PSUBQ xmmreg,xmmrm \361\2\x0F\xFB\110 WILLAMETTE,SSE2,SO
+PSUBSB xmmreg,xmmrm \361\2\x0F\xE8\110 WILLAMETTE,SSE2,SO
+PSUBSW xmmreg,xmmrm \361\2\x0F\xE9\110 WILLAMETTE,SSE2,SO
+PSUBUSB xmmreg,xmmrm \361\2\x0F\xD8\110 WILLAMETTE,SSE2,SO
+PSUBUSW xmmreg,xmmrm \361\2\x0F\xD9\110 WILLAMETTE,SSE2,SO
+PUNPCKHBW xmmreg,xmmrm \361\2\x0F\x68\110 WILLAMETTE,SSE2,SO
+PUNPCKHWD xmmreg,xmmrm \361\2\x0F\x69\110 WILLAMETTE,SSE2,SO
+PUNPCKHDQ xmmreg,xmmrm \361\2\x0F\x6A\110 WILLAMETTE,SSE2,SO
+PUNPCKHQDQ xmmreg,xmmrm \361\2\x0F\x6D\110 WILLAMETTE,SSE2,SO
+PUNPCKLBW xmmreg,xmmrm \361\2\x0F\x60\110 WILLAMETTE,SSE2,SO
+PUNPCKLWD xmmreg,xmmrm \361\2\x0F\x61\110 WILLAMETTE,SSE2,SO
+PUNPCKLDQ xmmreg,xmmrm \361\2\x0F\x62\110 WILLAMETTE,SSE2,SO
+PUNPCKLQDQ xmmreg,xmmrm \361\2\x0F\x6C\110 WILLAMETTE,SSE2,SO
+PXOR xmmreg,xmmrm \361\2\x0F\xEF\110 WILLAMETTE,SSE2,SO
+
+;# Willamette Streaming SIMD instructions (SSE2)
+ADDPD xmmreg,xmmrm \361\2\x0F\x58\110 WILLAMETTE,SSE2,SO
+ADDSD xmmreg,xmmrm \362\2\x0F\x58\110 WILLAMETTE,SSE2,SQ
+ANDNPD xmmreg,xmmrm \361\2\x0F\x55\110 WILLAMETTE,SSE2,SO
+ANDPD xmmreg,xmmrm \361\2\x0F\x54\110 WILLAMETTE,SSE2,SO
+CMPEQPD xmmreg,xmmrm \361\2\x0F\xC2\110\1\x00 WILLAMETTE,SSE2,SO
+CMPEQSD xmmreg,xmmrm \362\2\x0F\xC2\110\1\x00 WILLAMETTE,SSE2
+CMPLEPD xmmreg,xmmrm \361\2\x0F\xC2\110\1\x02 WILLAMETTE,SSE2,SO
+CMPLESD xmmreg,xmmrm \362\2\x0F\xC2\110\1\x02 WILLAMETTE,SSE2
+CMPLTPD xmmreg,xmmrm \361\2\x0F\xC2\110\1\x01 WILLAMETTE,SSE2,SO
+CMPLTSD xmmreg,xmmrm \362\2\x0F\xC2\110\1\x01 WILLAMETTE,SSE2
+CMPNEQPD xmmreg,xmmrm \361\2\x0F\xC2\110\1\x04 WILLAMETTE,SSE2,SO
+CMPNEQSD xmmreg,xmmrm \362\2\x0F\xC2\110\1\x04 WILLAMETTE,SSE2
+CMPNLEPD xmmreg,xmmrm \361\2\x0F\xC2\110\1\x06 WILLAMETTE,SSE2,SO
+CMPNLESD xmmreg,xmmrm \362\2\x0F\xC2\110\1\x06 WILLAMETTE,SSE2
+CMPNLTPD xmmreg,xmmrm \361\2\x0F\xC2\110\1\x05 WILLAMETTE,SSE2,SO
+CMPNLTSD xmmreg,xmmrm \362\2\x0F\xC2\110\1\x05 WILLAMETTE,SSE2
+CMPORDPD xmmreg,xmmrm \361\2\x0F\xC2\110\1\x07 WILLAMETTE,SSE2,SO
+CMPORDSD xmmreg,xmmrm \362\2\x0F\xC2\110\1\x07 WILLAMETTE,SSE2
+CMPUNORDPD xmmreg,xmmrm \361\2\x0F\xC2\110\1\x03 WILLAMETTE,SSE2,SO
+CMPUNORDSD xmmreg,xmmrm \362\2\x0F\xC2\110\1\x03 WILLAMETTE,SSE2
+; CMPPD/CMPSD must come after the specific ops; that way the disassembler will find the
+; specific ops first and only disassemble illegal ones as cmppd/cmpsd.
+CMPPD xmmreg,xmmrm,imm \361\2\x0F\xC2\110\26 WILLAMETTE,SSE2,SM2,SB,AR2
+CMPSD xmmreg,xmmrm,imm \362\2\x0F\xC2\110\26 WILLAMETTE,SSE2,SB,AR2
+COMISD xmmreg,xmmrm \361\2\x0F\x2F\110 WILLAMETTE,SSE2
+CVTDQ2PD xmmreg,xmmrm \363\2\x0F\xE6\110 WILLAMETTE,SSE2,SQ
+CVTDQ2PS xmmreg,xmmrm \360\2\x0F\x5B\110 WILLAMETTE,SSE2,SO
+CVTPD2DQ xmmreg,xmmrm \362\2\x0F\xE6\110 WILLAMETTE,SSE2,SO
+CVTPD2PI mmxreg,xmmrm \361\2\x0F\x2D\110 WILLAMETTE,SSE2,SO
+CVTPD2PS xmmreg,xmmrm \361\2\x0F\x5A\110 WILLAMETTE,SSE2,SO
+CVTPI2PD xmmreg,mmxrm \361\2\x0F\x2A\110 WILLAMETTE,SSE2,SQ
+CVTPS2DQ xmmreg,xmmrm \361\2\x0F\x5B\110 WILLAMETTE,SSE2,SO
+CVTPS2PD xmmreg,xmmrm \360\2\x0F\x5A\110 WILLAMETTE,SSE2,SQ
+CVTSD2SI reg32,xmmreg \362\2\x0F\x2D\110 WILLAMETTE,SSE2,SQ,AR1
+CVTSD2SI reg32,mem \362\2\x0F\x2D\110 WILLAMETTE,SSE2,SQ,AR1
+CVTSD2SI reg64,xmmreg \324\362\2\x0F\x2D\110 X64,SSE2,SQ,AR1
+CVTSD2SI reg64,mem \324\362\2\x0F\x2D\110 X64,SSE2,SQ,AR1
+CVTSD2SS xmmreg,xmmrm \362\2\x0F\x5A\110 WILLAMETTE,SSE2,SQ
+CVTSI2SD xmmreg,mem \362\2\x0F\x2A\110 WILLAMETTE,SSE2,SD,AR1,ND
+CVTSI2SD xmmreg,rm32 \362\2\x0F\x2A\110 WILLAMETTE,SSE2,SD,AR1
+CVTSI2SD xmmreg,rm64 \324\362\2\x0F\x2A\110 X64,SSE2,SQ,AR1
+CVTSS2SD xmmreg,xmmrm \363\2\x0F\x5A\110 WILLAMETTE,SSE2,SD
+CVTTPD2PI mmxreg,xmmrm \361\2\x0F\x2C\110 WILLAMETTE,SSE2,SO
+CVTTPD2DQ xmmreg,xmmrm \361\2\x0F\xE6\110 WILLAMETTE,SSE2,SO
+CVTTPS2DQ xmmreg,xmmrm \363\2\x0F\x5B\110 WILLAMETTE,SSE2,SO
+CVTTSD2SI reg32,xmmreg \362\2\x0F\x2C\110 WILLAMETTE,SSE2,SQ,AR1
+CVTTSD2SI reg32,mem \362\2\x0F\x2C\110 WILLAMETTE,SSE2,SQ,AR1
+CVTTSD2SI reg64,xmmreg \324\362\2\x0F\x2C\110 X64,SSE2,SQ,AR1
+CVTTSD2SI reg64,mem \324\362\2\x0F\x2C\110 X64,SSE2,SQ,AR1
+DIVPD xmmreg,xmmrm \361\2\x0F\x5E\110 WILLAMETTE,SSE2,SO
+DIVSD xmmreg,xmmrm \362\2\x0F\x5E\110 WILLAMETTE,SSE2
+MAXPD xmmreg,xmmrm \361\2\x0F\x5F\110 WILLAMETTE,SSE2,SO
+MAXSD xmmreg,xmmrm \362\2\x0F\x5F\110 WILLAMETTE,SSE2
+MINPD xmmreg,xmmrm \361\2\x0F\x5D\110 WILLAMETTE,SSE2,SO
+MINSD xmmreg,xmmrm \362\2\x0F\x5D\110 WILLAMETTE,SSE2
+MOVAPD xmmreg,xmmreg \361\2\x0F\x28\110 WILLAMETTE,SSE2
+MOVAPD xmmreg,xmmreg \361\2\x0F\x29\101 WILLAMETTE,SSE2
+MOVAPD mem,xmmreg \361\2\x0F\x29\101 WILLAMETTE,SSE2,SO
+MOVAPD xmmreg,mem \361\2\x0F\x28\110 WILLAMETTE,SSE2,SO
+MOVHPD mem,xmmreg \361\2\x0F\x17\101 WILLAMETTE,SSE2
+MOVHPD xmmreg,mem \361\2\x0F\x16\110 WILLAMETTE,SSE2
+MOVLPD mem,xmmreg \361\2\x0F\x13\101 WILLAMETTE,SSE2
+MOVLPD xmmreg,mem \361\2\x0F\x12\110 WILLAMETTE,SSE2
+MOVMSKPD reg32,xmmreg \361\2\x0F\x50\110 WILLAMETTE,SSE2
+MOVMSKPD reg64,xmmreg \361\324\2\x0F\x50\110 X64,SSE2
+MOVSD xmmreg,xmmreg \362\2\x0F\x10\110 WILLAMETTE,SSE2
+MOVSD xmmreg,xmmreg \362\2\x0F\x11\101 WILLAMETTE,SSE2
+MOVSD mem,xmmreg \362\2\x0F\x11\101 WILLAMETTE,SSE2
+MOVSD xmmreg,mem \362\2\x0F\x10\110 WILLAMETTE,SSE2
+MOVUPD xmmreg,xmmreg \361\2\x0F\x10\110 WILLAMETTE,SSE2
+MOVUPD xmmreg,xmmreg \361\2\x0F\x11\101 WILLAMETTE,SSE2
+MOVUPD mem,xmmreg \361\2\x0F\x11\101 WILLAMETTE,SSE2,SO
+MOVUPD xmmreg,mem \361\2\x0F\x10\110 WILLAMETTE,SSE2,SO
+MULPD xmmreg,xmmrm \361\2\x0F\x59\110 WILLAMETTE,SSE2,SO
+MULSD xmmreg,xmmrm \362\2\x0F\x59\110 WILLAMETTE,SSE2
+ORPD xmmreg,xmmrm \361\2\x0F\x56\110 WILLAMETTE,SSE2,SO
+SHUFPD xmmreg,xmmreg,imm \361\2\x0F\xC6\110\26 WILLAMETTE,SSE2,SB,AR2
+SHUFPD xmmreg,mem,imm \361\2\x0F\xC6\110\26 WILLAMETTE,SSE2,SM,SB,AR2
+SQRTPD xmmreg,xmmrm \361\2\x0F\x51\110 WILLAMETTE,SSE2,SO
+SQRTSD xmmreg,xmmrm \362\2\x0F\x51\110 WILLAMETTE,SSE2
+SUBPD xmmreg,xmmrm \361\2\x0F\x5C\110 WILLAMETTE,SSE2,SO
+SUBSD xmmreg,xmmrm \362\2\x0F\x5C\110 WILLAMETTE,SSE2
+UCOMISD xmmreg,xmmrm \361\2\x0F\x2E\110 WILLAMETTE,SSE2
+UNPCKHPD xmmreg,xmmrm \361\2\x0F\x15\110 WILLAMETTE,SSE2,SO
+UNPCKLPD xmmreg,xmmrm \361\2\x0F\x14\110 WILLAMETTE,SSE2,SO
+XORPD xmmreg,xmmrm \361\2\x0F\x57\110 WILLAMETTE,SSE2,SO
+
+;# Prescott New Instructions (SSE3)
+ADDSUBPD xmmreg,xmmrm \361\2\x0F\xD0\110 PRESCOTT,SSE3,SO
+ADDSUBPS xmmreg,xmmrm \362\2\x0F\xD0\110 PRESCOTT,SSE3,SO
+HADDPD xmmreg,xmmrm \361\2\x0F\x7C\110 PRESCOTT,SSE3,SO
+HADDPS xmmreg,xmmrm \362\2\x0F\x7C\110 PRESCOTT,SSE3,SO
+HSUBPD xmmreg,xmmrm \361\2\x0F\x7D\110 PRESCOTT,SSE3,SO
+HSUBPS xmmreg,xmmrm \362\2\x0F\x7D\110 PRESCOTT,SSE3,SO
+LDDQU xmmreg,mem \362\2\x0F\xF0\110 PRESCOTT,SSE3,SO
+MOVDDUP xmmreg,xmmrm \362\2\x0F\x12\110 PRESCOTT,SSE3
+MOVSHDUP xmmreg,xmmrm \363\2\x0F\x16\110 PRESCOTT,SSE3
+MOVSLDUP xmmreg,xmmrm \363\2\x0F\x12\110 PRESCOTT,SSE3
+
+;# VMX Instructions
+VMCALL void \3\x0F\x01\xC1 VMX
+VMCLEAR mem \361\2\x0F\xC7\206 VMX
+VMLAUNCH void \3\x0F\x01\xC2 VMX
+VMLOAD void \3\x0F\x01\xDA X64,VMX
+VMMCALL void \3\x0F\x01\xD9 X64,VMX
+VMPTRLD mem \2\x0F\xC7\206 VMX
+VMPTRST mem \2\x0F\xC7\207 VMX
+VMREAD rm32,reg32 \360\2\x0F\x78\101 VMX,NOLONG,SD
+VMREAD rm64,reg64 \323\360\2\x0F\x78\101 X64,VMX,SQ
+VMRESUME void \3\x0F\x01\xC3 VMX
+VMRUN void \3\x0F\x01\xD8 X64,VMX
+VMSAVE void \3\x0F\x01\xDB X64,VMX
+VMWRITE reg32,rm32 \360\2\x0F\x79\110 VMX,NOLONG,SD
+VMWRITE reg64,rm64 \323\360\2\x0F\x79\110 X64,VMX,SQ
+VMXOFF void \3\x0F\x01\xC4 VMX
+VMXON mem \363\2\x0F\xC7\206 VMX
+;# Extended Page Tables VMX instructions
+INVEPT reg32,mem [rm: 66 0f 38 80 /r] VMX,SO,NOLONG
+INVEPT reg64,mem [rm: o64nw 66 0f 38 80 /r] VMX,SO,LONG
+INVVPID reg32,mem [rm: 66 0f 38 81 /r] VMX,SO,NOLONG
+INVVPID reg64,mem [rm: o64nw 66 0f 38 81 /r] VMX,SO,LONG
+
+;# Tejas New Instructions (SSSE3)
+PABSB mmxreg,mmxrm \360\3\x0F\x38\x1C\110 SSSE3,MMX,SQ
+PABSB xmmreg,xmmrm \361\3\x0F\x38\x1C\110 SSSE3
+PABSW mmxreg,mmxrm \360\3\x0F\x38\x1D\110 SSSE3,MMX,SQ
+PABSW xmmreg,xmmrm \361\3\x0F\x38\x1D\110 SSSE3
+PABSD mmxreg,mmxrm \360\3\x0F\x38\x1E\110 SSSE3,MMX,SQ
+PABSD xmmreg,xmmrm \361\3\x0F\x38\x1E\110 SSSE3
+PALIGNR mmxreg,mmxrm,imm \360\3\x0F\x3A\x0F\110\26 SSSE3,MMX,SQ
+PALIGNR xmmreg,xmmrm,imm \361\3\x0F\x3A\x0F\110\26 SSSE3
+PHADDW mmxreg,mmxrm \360\3\x0F\x38\x01\110 SSSE3,MMX,SQ
+PHADDW xmmreg,xmmrm \361\3\x0F\x38\x01\110 SSSE3
+PHADDD mmxreg,mmxrm \360\3\x0F\x38\x02\110 SSSE3,MMX,SQ
+PHADDD xmmreg,xmmrm \361\3\x0F\x38\x02\110 SSSE3
+PHADDSW mmxreg,mmxrm \360\3\x0F\x38\x03\110 SSSE3,MMX,SQ
+PHADDSW xmmreg,xmmrm \361\3\x0F\x38\x03\110 SSSE3
+PHSUBW mmxreg,mmxrm \360\3\x0F\x38\x05\110 SSSE3,MMX,SQ
+PHSUBW xmmreg,xmmrm \361\3\x0F\x38\x05\110 SSSE3
+PHSUBD mmxreg,mmxrm \360\3\x0F\x38\x06\110 SSSE3,MMX,SQ
+PHSUBD xmmreg,xmmrm \361\3\x0F\x38\x06\110 SSSE3
+PHSUBSW mmxreg,mmxrm \360\3\x0F\x38\x07\110 SSSE3,MMX,SQ
+PHSUBSW xmmreg,xmmrm \361\3\x0F\x38\x07\110 SSSE3
+PMADDUBSW mmxreg,mmxrm \360\3\x0F\x38\x04\110 SSSE3,MMX,SQ
+PMADDUBSW xmmreg,xmmrm \361\3\x0F\x38\x04\110 SSSE3
+PMULHRSW mmxreg,mmxrm \360\3\x0F\x38\x0B\110 SSSE3,MMX,SQ
+PMULHRSW xmmreg,xmmrm \361\3\x0F\x38\x0B\110 SSSE3
+PSHUFB mmxreg,mmxrm \360\3\x0F\x38\x00\110 SSSE3,MMX,SQ
+PSHUFB xmmreg,xmmrm \361\3\x0F\x38\x00\110 SSSE3
+PSIGNB mmxreg,mmxrm \360\3\x0F\x38\x08\110 SSSE3,MMX,SQ
+PSIGNB xmmreg,xmmrm \361\3\x0F\x38\x08\110 SSSE3
+PSIGNW mmxreg,mmxrm \360\3\x0F\x38\x09\110 SSSE3,MMX,SQ
+PSIGNW xmmreg,xmmrm \361\3\x0F\x38\x09\110 SSSE3
+PSIGND mmxreg,mmxrm \360\3\x0F\x38\x0A\110 SSSE3,MMX,SQ
+PSIGND xmmreg,xmmrm \361\3\x0F\x38\x0A\110 SSSE3
+
+;# AMD SSE4A
+EXTRQ xmmreg,imm,imm \361\2\x0F\x78\200\25\26 SSE4A,AMD
+EXTRQ xmmreg,xmmreg \361\2\x0F\x79\110 SSE4A,AMD
+INSERTQ xmmreg,xmmreg,imm,imm \362\2\x0F\x78\110\26\27 SSE4A,AMD
+INSERTQ xmmreg,xmmreg \362\2\x0F\x79\110 SSE4A,AMD
+MOVNTSD mem,xmmreg \362\2\x0F\x2B\101 SSE4A,AMD,SQ
+MOVNTSS mem,xmmreg \363\2\x0F\x2B\101 SSE4A,AMD,SD
+
+;# New instructions in Barcelona
+LZCNT reg16,rm16 \320\333\2\x0F\xBD\110 P6,AMD
+LZCNT reg32,rm32 \321\333\2\x0F\xBD\110 P6,AMD
+LZCNT reg64,rm64 \324\333\2\x0F\xBD\110 X64,AMD
+
+;# Penryn New Instructions (SSE4.1)
+BLENDPD xmmreg,xmmrm,imm \361\3\x0F\x3A\x0D\110\26 SSE41
+BLENDPS xmmreg,xmmrm,imm \361\3\x0F\x3A\x0C\110\26 SSE41
+BLENDVPD xmmreg,xmmrm,xmm0 \361\3\x0F\x38\x15\110 SSE41
+BLENDVPS xmmreg,xmmrm,xmm0 \361\3\x0F\x38\x14\110 SSE41
+DPPD xmmreg,xmmrm,imm \361\3\x0F\x3A\x41\110\26 SSE41
+DPPS xmmreg,xmmrm,imm \361\3\x0F\x3A\x40\110\26 SSE41
+EXTRACTPS rm32,xmmreg,imm \361\3\x0F\x3A\x17\101\26 SSE41
+EXTRACTPS reg64,xmmreg,imm \324\361\3\x0F\x3A\x17\101\26 SSE41,X64
+INSERTPS xmmreg,xmmrm,imm \361\3\x0F\x3A\x21\110\26 SSE41,SD
+MOVNTDQA xmmreg,mem \361\3\x0F\x38\x2A\110 SSE41
+MPSADBW xmmreg,xmmrm,imm \361\3\x0F\x3A\x42\110\26 SSE41
+PACKUSDW xmmreg,xmmrm \361\3\x0F\x38\x2B\110 SSE41
+PBLENDVB xmmreg,xmmrm,xmm0 \361\3\x0F\x38\x10\110 SSE41
+PBLENDW xmmreg,xmmrm,imm \361\3\x0F\x3A\x0E\110\26 SSE41
+PCMPEQQ xmmreg,xmmrm \361\3\x0F\x38\x29\110 SSE41
+PEXTRB reg32,xmmreg,imm \361\3\x0F\x3A\x14\101\26 SSE41
+PEXTRB mem8,xmmreg,imm \361\3\x0F\x3A\x14\101\26 SSE41
+PEXTRB reg64,xmmreg,imm \324\361\3\x0F\x3A\x14\101\26 SSE41,X64
+PEXTRD rm32,xmmreg,imm \361\3\x0F\x3A\x16\101\26 SSE41
+PEXTRQ rm64,xmmreg,imm \324\361\3\x0F\x3A\x16\101\26 SSE41,X64
+PEXTRW reg32,xmmreg,imm \361\3\x0F\x3A\x15\101\26 SSE41
+PEXTRW mem16,xmmreg,imm \361\3\x0F\x3A\x15\101\26 SSE41
+PEXTRW reg64,xmmreg,imm \324\361\3\x0F\x3A\x15\101\26 SSE41,X64
+PHMINPOSUW xmmreg,xmmrm \361\3\x0F\x38\x41\110 SSE41
+PINSRB xmmreg,mem,imm \361\3\x0F\x3A\x20\110\26 SSE41,SB,AR2
+PINSRB xmmreg,rm8,imm \325\361\3\x0F\x3A\x20\110\26 SSE41,SB,AR2
+PINSRB xmmreg,reg32,imm \361\3\x0F\x3A\x20\110\26 SSE41,SB,AR2
+PINSRD xmmreg,mem,imm \361\3\x0F\x3A\x22\110\26 SSE41,SB,AR2
+PINSRD xmmreg,rm32,imm \361\3\x0F\x3A\x22\110\26 SSE41,SB,AR2
+PINSRQ xmmreg,mem,imm \324\361\3\x0F\x3A\x22\110\26 SSE41,X64,SB,AR2
+PINSRQ xmmreg,rm64,imm \324\361\3\x0F\x3A\x22\110\26 SSE41,X64,SB,AR2
+PMAXSB xmmreg,xmmrm \361\3\x0F\x38\x3C\110 SSE41
+PMAXSD xmmreg,xmmrm \361\3\x0F\x38\x3D\110 SSE41
+PMAXUD xmmreg,xmmrm \361\3\x0F\x38\x3F\110 SSE41
+PMAXUW xmmreg,xmmrm \361\3\x0F\x38\x3E\110 SSE41
+PMINSB xmmreg,xmmrm \361\3\x0F\x38\x38\110 SSE41
+PMINSD xmmreg,xmmrm \361\3\x0F\x38\x39\110 SSE41
+PMINUD xmmreg,xmmrm \361\3\x0F\x38\x3B\110 SSE41
+PMINUW xmmreg,xmmrm \361\3\x0F\x38\x3A\110 SSE41
+PMOVSXBW xmmreg,xmmrm \361\3\x0F\x38\x20\110 SSE41,SQ
+PMOVSXBD xmmreg,xmmrm \361\3\x0F\x38\x21\110 SSE41,SD
+PMOVSXBQ xmmreg,xmmrm \361\3\x0F\x38\x22\110 SSE41,SW
+PMOVSXWD xmmreg,xmmrm \361\3\x0F\x38\x23\110 SSE41,SQ
+PMOVSXWQ xmmreg,xmmrm \361\3\x0F\x38\x24\110 SSE41,SD
+PMOVSXDQ xmmreg,xmmrm \361\3\x0F\x38\x25\110 SSE41,SQ
+PMOVZXBW xmmreg,xmmrm \361\3\x0F\x38\x30\110 SSE41,SQ
+PMOVZXBD xmmreg,xmmrm \361\3\x0F\x38\x31\110 SSE41,SD
+PMOVZXBQ xmmreg,xmmrm \361\3\x0F\x38\x32\110 SSE41,SW
+PMOVZXWD xmmreg,xmmrm \361\3\x0F\x38\x33\110 SSE41,SQ
+PMOVZXWQ xmmreg,xmmrm \361\3\x0F\x38\x34\110 SSE41,SD
+PMOVZXDQ xmmreg,xmmrm \361\3\x0F\x38\x35\110 SSE41,SQ
+PMULDQ xmmreg,xmmrm \361\3\x0F\x38\x28\110 SSE41
+PMULLD xmmreg,xmmrm \361\3\x0F\x38\x40\110 SSE41
+PTEST xmmreg,xmmrm \361\3\x0F\x38\x17\110 SSE41
+ROUNDPD xmmreg,xmmrm,imm \361\3\x0F\x3A\x09\110\26 SSE41
+ROUNDPS xmmreg,xmmrm,imm \361\3\x0F\x3A\x08\110\26 SSE41
+ROUNDSD xmmreg,xmmrm,imm \361\3\x0F\x3A\x0B\110\26 SSE41
+ROUNDSS xmmreg,xmmrm,imm \361\3\x0F\x3A\x0A\110\26 SSE41
+
+;# Nehalem New Instructions (SSE4.2)
+CRC32 reg32,rm8 \332\3\x0F\x38\xF0\110 SSE42
+CRC32 reg32,rm16 \320\332\3\x0F\x38\xF1\110 SSE42
+CRC32 reg32,rm32 \321\332\3\x0F\x38\xF1\110 SSE42
+CRC32 reg64,rm8 \324\332\3\x0F\x38\xF0\110 SSE42,X64
+CRC32 reg64,rm64 \324\332\3\x0F\x38\xF1\110 SSE42,X64
+PCMPESTRI xmmreg,xmmrm,imm \361\3\x0F\x3A\x61\110\26 SSE42
+PCMPESTRM xmmreg,xmmrm,imm \361\3\x0F\x3A\x60\110\26 SSE42
+PCMPISTRI xmmreg,xmmrm,imm \361\3\x0F\x3A\x63\110\26 SSE42
+PCMPISTRM xmmreg,xmmrm,imm \361\3\x0F\x3A\x62\110\26 SSE42
+PCMPGTQ xmmreg,xmmrm \361\3\x0F\x38\x37\110 SSE42
+POPCNT reg16,rm16 \320\333\2\x0F\xB8\110 NEHALEM,SW
+POPCNT reg32,rm32 \321\333\2\x0F\xB8\110 NEHALEM,SD
+POPCNT reg64,rm64 \324\333\2\x0F\xB8\110 NEHALEM,SQ,X64
+
+;# Intel SMX
+GETSEC void \2\x0F\x37 KATMAI
+
+;# Geode (Cyrix) 3DNow! additions
+PFRCPV mmxreg,mmxrm \323\2\x0F\x0F\110\1\x86 PENT,3DNOW,SQ,CYRIX
+PFRSQRTV mmxreg,mmxrm \323\2\x0F\x0F\110\1\x87 PENT,3DNOW,SQ,CYRIX
+
+;# Intel new instructions in ???
+; Is NEHALEM right here?
+MOVBE reg16,mem16 [rm: o16 0f 38 f0 /r] NEHALEM,SM
+MOVBE reg32,mem32 [rm: o32 0f 38 f0 /r] NEHALEM,SM
+MOVBE reg64,mem64 [rm: o64 0f 38 f0 /r] NEHALEM,SM
+MOVBE mem16,reg16 [mr: o16 0f 38 f1 /r] NEHALEM,SM
+MOVBE mem32,reg32 [mr: o32 0f 38 f1 /r] NEHALEM,SM
+MOVBE mem64,reg64 [mr: o64 0f 38 f1 /r] NEHALEM,SM
+
+;# Intel AES instructions
+AESENC xmmreg,xmmrm128 [rm: 66 0f 38 dc /r] SSE,WESTMERE
+AESENCLAST xmmreg,xmmrm128 [rm: 66 0f 38 dd /r] SSE,WESTMERE
+AESDEC xmmreg,xmmrm128 [rm: 66 0f 38 de /r] SSE,WESTMERE
+AESDECLAST xmmreg,xmmrm128 [rm: 66 0f 38 df /r] SSE,WESTMERE
+AESIMC xmmreg,xmmrm128 [rm: 66 0f 38 db /r] SSE,WESTMERE
+AESKEYGENASSIST xmmreg,xmmrm128,imm8 [rmi: 66 0f 3a df /r ib] SSE,WESTMERE
+
+;# Intel AVX AES instructions
+VAESENC xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 dc /r] AVX,SANDYBRIDGE
+VAESENCLAST xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 dd /r] AVX,SANDYBRIDGE
+VAESDEC xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 de /r] AVX,SANDYBRIDGE
+VAESDECLAST xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 df /r] AVX,SANDYBRIDGE
+VAESIMC xmmreg,xmmrm128 [rm: vex.128.66.0f38 db /r] AVX,SANDYBRIDGE
+VAESKEYGENASSIST xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a df /r ib] AVX,SANDYBRIDGE
+
+;# Intel AVX instructions
+VADDPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 58 /r] AVX,SANDYBRIDGE
+VADDPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 58 /r] AVX,SANDYBRIDGE
+VADDPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 58 /r] AVX,SANDYBRIDGE
+VADDPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 58 /r] AVX,SANDYBRIDGE
+VADDSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 58 /r] AVX,SANDYBRIDGE
+VADDSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 58 /r] AVX,SANDYBRIDGE
+VADDSUBPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d0 /r] AVX,SANDYBRIDGE
+VADDSUBPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f d0 /r] AVX,SANDYBRIDGE
+VADDSUBPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.f2.0f d0 /r] AVX,SANDYBRIDGE
+VADDSUBPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.f2.0f d0 /r] AVX,SANDYBRIDGE
+VANDPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 54 /r] AVX,SANDYBRIDGE
+VANDPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 54 /r] AVX,SANDYBRIDGE
+VANDPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 54 /r] AVX,SANDYBRIDGE
+VANDPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 54 /r] AVX,SANDYBRIDGE
+VANDNPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 55 /r] AVX,SANDYBRIDGE
+VANDNPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 55 /r] AVX,SANDYBRIDGE
+VANDNPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 55 /r] AVX,SANDYBRIDGE
+VANDNPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 55 /r] AVX,SANDYBRIDGE
+VBLENDPD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 0d /r ib] AVX,SANDYBRIDGE
+VBLENDPD ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a 0d /r ib] AVX,SANDYBRIDGE
+VBLENDPS xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 0c /r ib] AVX,SANDYBRIDGE
+VBLENDPS ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a 0c /r ib] AVX,SANDYBRIDGE
+VBLENDVPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.nds.128.66.0f3a.w0 4b /r /is4] AVX,SANDYBRIDGE
+VBLENDVPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.nds.256.66.0f3a.w0 4b /r /is4] AVX,SANDYBRIDGE
+VBLENDVPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.nds.128.66.0f3a.w0 4a /r /is4] AVX,SANDYBRIDGE
+VBLENDVPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.nds.256.66.0f3a.w0 4a /r /is4] AVX,SANDYBRIDGE
+VBROADCASTSS xmmreg,mem32 [rm: vex.128.66.0f38.w0 18 /r] AVX,SANDYBRIDGE
+VBROADCASTSS ymmreg,mem32 [rm: vex.256.66.0f38.w0 18 /r] AVX,SANDYBRIDGE
+VBROADCASTSD ymmreg,mem64 [rm: vex.256.66.0f38.w0 19 /r] AVX,SANDYBRIDGE
+VBROADCASTF128 ymmreg,mem128 [rm: vex.256.66.0f38.w0 1a /r] AVX,SANDYBRIDGE
+; Specific aliases first, then the generic version, to keep the disassembler happy...
+VCMPEQ_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPEQ_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPEQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPEQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPLT_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLT_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLTPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLTPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLE_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPLE_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPLEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPLEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPUNORD_QPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPUNORD_QPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPUNORDPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPUNORDPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPNEQ_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNEQ_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNEQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNEQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNLT_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLT_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLTPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLTPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLE_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPNLE_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPNLEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPNLEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPORD_QPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPORD_QPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPORDPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPORDPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPEQ_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 08] AVX,SANDYBRIDGE
+VCMPEQ_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 08] AVX,SANDYBRIDGE
+VCMPNGE_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGE_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGT_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPNGT_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPNGTPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPNGTPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPFALSE_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPFALSE_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPFALSEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPFALSEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPNEQ_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0c] AVX,SANDYBRIDGE
+VCMPNEQ_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0c] AVX,SANDYBRIDGE
+VCMPGE_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGE_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGT_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPGT_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPGTPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPGTPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPTRUE_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPTRUE_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPTRUEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPTRUEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPEQ_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 10] AVX,SANDYBRIDGE
+VCMPEQ_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 10] AVX,SANDYBRIDGE
+VCMPLT_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 11] AVX,SANDYBRIDGE
+VCMPLT_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 11] AVX,SANDYBRIDGE
+VCMPLE_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 12] AVX,SANDYBRIDGE
+VCMPLE_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 12] AVX,SANDYBRIDGE
+VCMPUNORD_SPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 13] AVX,SANDYBRIDGE
+VCMPUNORD_SPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 13] AVX,SANDYBRIDGE
+VCMPNEQ_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 14] AVX,SANDYBRIDGE
+VCMPNEQ_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 14] AVX,SANDYBRIDGE
+VCMPNLT_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 15] AVX,SANDYBRIDGE
+VCMPNLT_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 15] AVX,SANDYBRIDGE
+VCMPNLE_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 16] AVX,SANDYBRIDGE
+VCMPNLE_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 16] AVX,SANDYBRIDGE
+VCMPORD_SPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 17] AVX,SANDYBRIDGE
+VCMPORD_SPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 17] AVX,SANDYBRIDGE
+VCMPEQ_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 18] AVX,SANDYBRIDGE
+VCMPEQ_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 18] AVX,SANDYBRIDGE
+VCMPNGE_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 19] AVX,SANDYBRIDGE
+VCMPNGE_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 19] AVX,SANDYBRIDGE
+VCMPNGT_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1a] AVX,SANDYBRIDGE
+VCMPNGT_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1a] AVX,SANDYBRIDGE
+VCMPFALSE_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1b] AVX,SANDYBRIDGE
+VCMPFALSE_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1b] AVX,SANDYBRIDGE
+VCMPNEQ_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1c] AVX,SANDYBRIDGE
+VCMPNEQ_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1c] AVX,SANDYBRIDGE
+VCMPGE_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1d] AVX,SANDYBRIDGE
+VCMPGE_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1d] AVX,SANDYBRIDGE
+VCMPGT_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1e] AVX,SANDYBRIDGE
+VCMPGT_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1e] AVX,SANDYBRIDGE
+VCMPTRUE_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1f] AVX,SANDYBRIDGE
+VCMPTRUE_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1f] AVX,SANDYBRIDGE
+VCMPPD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f c2 /r ib] AVX,SANDYBRIDGE
+VCMPPD ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f c2 /r ib] AVX,SANDYBRIDGE
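+; Illustrative example (comment only): the aliases fold the predicate into the
+; immediate byte, so the following are the same instruction.
+;     vcmpltpd xmm0,xmm1,xmm2     ; predicate 01
+;     vcmppd   xmm0,xmm1,xmm2,1   ; generic form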
+; Specific aliases first, then the generic version, to keep the disassembler happy...
+VCMPEQ_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPEQ_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPEQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPEQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPLT_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLT_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLTPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLTPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLE_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPLE_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPLEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPLEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPUNORD_QPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPUNORD_QPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPUNORDPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPUNORDPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPNEQ_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNEQ_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNEQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNEQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNLT_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLT_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLTPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLTPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLE_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPNLE_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPNLEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPNLEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPORD_QPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPORD_QPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPORDPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPORDPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPEQ_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 08] AVX,SANDYBRIDGE
+VCMPEQ_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 08] AVX,SANDYBRIDGE
+VCMPNGE_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGE_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGT_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPNGT_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPNGTPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPNGTPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPFALSE_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPFALSE_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPFALSEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPFALSEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPNEQ_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0c] AVX,SANDYBRIDGE
+VCMPNEQ_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0c] AVX,SANDYBRIDGE
+VCMPGE_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGE_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGT_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPGT_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPGTPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPGTPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPTRUE_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPTRUE_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPTRUEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPTRUEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPEQ_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 10] AVX,SANDYBRIDGE
+VCMPEQ_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 10] AVX,SANDYBRIDGE
+VCMPLT_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 11] AVX,SANDYBRIDGE
+VCMPLT_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 11] AVX,SANDYBRIDGE
+VCMPLE_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 12] AVX,SANDYBRIDGE
+VCMPLE_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 12] AVX,SANDYBRIDGE
+VCMPUNORD_SPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 13] AVX,SANDYBRIDGE
+VCMPUNORD_SPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 13] AVX,SANDYBRIDGE
+VCMPNEQ_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 14] AVX,SANDYBRIDGE
+VCMPNEQ_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 14] AVX,SANDYBRIDGE
+VCMPNLT_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 15] AVX,SANDYBRIDGE
+VCMPNLT_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 15] AVX,SANDYBRIDGE
+VCMPNLE_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 16] AVX,SANDYBRIDGE
+VCMPNLE_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 16] AVX,SANDYBRIDGE
+VCMPORD_SPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 17] AVX,SANDYBRIDGE
+VCMPORD_SPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 17] AVX,SANDYBRIDGE
+VCMPEQ_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 18] AVX,SANDYBRIDGE
+VCMPEQ_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 18] AVX,SANDYBRIDGE
+VCMPNGE_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 19] AVX,SANDYBRIDGE
+VCMPNGE_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 19] AVX,SANDYBRIDGE
+VCMPNGT_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1a] AVX,SANDYBRIDGE
+VCMPNGT_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1a] AVX,SANDYBRIDGE
+VCMPFALSE_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1b] AVX,SANDYBRIDGE
+VCMPFALSE_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1b] AVX,SANDYBRIDGE
+VCMPNEQ_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1c] AVX,SANDYBRIDGE
+VCMPNEQ_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1c] AVX,SANDYBRIDGE
+VCMPGE_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1d] AVX,SANDYBRIDGE
+VCMPGE_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1d] AVX,SANDYBRIDGE
+VCMPGT_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1e] AVX,SANDYBRIDGE
+VCMPGT_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1e] AVX,SANDYBRIDGE
+VCMPTRUE_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1f] AVX,SANDYBRIDGE
+VCMPTRUE_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1f] AVX,SANDYBRIDGE
+VCMPPS xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.0f c2 /r ib] AVX,SANDYBRIDGE
+VCMPPS ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.0f c2 /r ib] AVX,SANDYBRIDGE
+; Specific aliases first, then the generic version, to keep the disassembler happy...
+VCMPEQ_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPEQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPLT_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLTSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLE_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPLESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPUNORD_QSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPUNORDSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPNEQ_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNEQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNLT_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLTSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLE_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPNLESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPORD_QSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPORDSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPEQ_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 08] AVX,SANDYBRIDGE
+VCMPNGE_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGT_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPNGTSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPFALSE_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPFALSESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPNEQ_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0c] AVX,SANDYBRIDGE
+VCMPGE_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGT_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPGTSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPTRUE_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPTRUESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPEQ_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 10] AVX,SANDYBRIDGE
+VCMPLT_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 11] AVX,SANDYBRIDGE
+VCMPLE_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 12] AVX,SANDYBRIDGE
+VCMPUNORD_SSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 13] AVX,SANDYBRIDGE
+VCMPNEQ_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 14] AVX,SANDYBRIDGE
+VCMPNLT_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 15] AVX,SANDYBRIDGE
+VCMPNLE_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 16] AVX,SANDYBRIDGE
+VCMPORD_SSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 17] AVX,SANDYBRIDGE
+VCMPEQ_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 18] AVX,SANDYBRIDGE
+VCMPNGE_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 19] AVX,SANDYBRIDGE
+VCMPNGT_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1a] AVX,SANDYBRIDGE
+VCMPFALSE_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1b] AVX,SANDYBRIDGE
+VCMPNEQ_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1c] AVX,SANDYBRIDGE
+VCMPGE_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1d] AVX,SANDYBRIDGE
+VCMPGT_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1e] AVX,SANDYBRIDGE
+VCMPTRUE_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1f] AVX,SANDYBRIDGE
+VCMPSD xmmreg,xmmreg*,xmmrm64,imm8 [rvmi: vex.nds.lig.f2.0f c2 /r ib] AVX,SANDYBRIDGE
+; Specific aliases first, then the generic version, to keep the disassembler happy...
+VCMPEQ_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPEQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 00] AVX,SANDYBRIDGE
+VCMPLT_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLTSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 01] AVX,SANDYBRIDGE
+VCMPLE_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPLESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 02] AVX,SANDYBRIDGE
+VCMPUNORD_QSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPUNORDSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 03] AVX,SANDYBRIDGE
+VCMPNEQ_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNEQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 04] AVX,SANDYBRIDGE
+VCMPNLT_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLTSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 05] AVX,SANDYBRIDGE
+VCMPNLE_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPNLESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 06] AVX,SANDYBRIDGE
+VCMPORD_QSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPORDSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 07] AVX,SANDYBRIDGE
+VCMPEQ_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 08] AVX,SANDYBRIDGE
+VCMPNGE_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 09] AVX,SANDYBRIDGE
+VCMPNGT_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPNGTSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0a] AVX,SANDYBRIDGE
+VCMPFALSE_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPFALSESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0b] AVX,SANDYBRIDGE
+VCMPNEQ_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0c] AVX,SANDYBRIDGE
+VCMPGE_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0d] AVX,SANDYBRIDGE
+VCMPGT_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPGTSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0e] AVX,SANDYBRIDGE
+VCMPTRUE_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPTRUESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0f] AVX,SANDYBRIDGE
+VCMPEQ_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 10] AVX,SANDYBRIDGE
+VCMPLT_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 11] AVX,SANDYBRIDGE
+VCMPLE_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 12] AVX,SANDYBRIDGE
+VCMPUNORD_SSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 13] AVX,SANDYBRIDGE
+VCMPNEQ_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 14] AVX,SANDYBRIDGE
+VCMPNLT_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 15] AVX,SANDYBRIDGE
+VCMPNLE_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 16] AVX,SANDYBRIDGE
+VCMPORD_SSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 17] AVX,SANDYBRIDGE
+VCMPEQ_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 18] AVX,SANDYBRIDGE
+VCMPNGE_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 19] AVX,SANDYBRIDGE
+VCMPNGT_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1a] AVX,SANDYBRIDGE
+VCMPFALSE_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1b] AVX,SANDYBRIDGE
+VCMPNEQ_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1c] AVX,SANDYBRIDGE
+VCMPGE_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1d] AVX,SANDYBRIDGE
+VCMPGT_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1e] AVX,SANDYBRIDGE
+VCMPTRUE_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1f] AVX,SANDYBRIDGE
+VCMPSS xmmreg,xmmreg*,xmmrm64,imm8 [rvmi: vex.nds.lig.f3.0f c2 /r ib] AVX,SANDYBRIDGE
+VCOMISD xmmreg,xmmrm64 [rm: vex.lig.66.0f 2f /r] AVX,SANDYBRIDGE
+VCOMISS xmmreg,xmmrm32 [rm: vex.lig.0f 2f /r] AVX,SANDYBRIDGE
+VCVTDQ2PD xmmreg,xmmrm64 [rm: vex.128.f3.0f e6 /r] AVX,SANDYBRIDGE
+VCVTDQ2PD ymmreg,xmmrm128 [rm: vex.256.f3.0f e6 /r] AVX,SANDYBRIDGE
+VCVTDQ2PS xmmreg,xmmrm128 [rm: vex.128.0f 5b /r] AVX,SANDYBRIDGE
+VCVTDQ2PS ymmreg,ymmrm256 [rm: vex.256.0f 5b /r] AVX,SANDYBRIDGE
+VCVTPD2DQ xmmreg,xmmreg [rm: vex.128.f2.0f e6 /r] AVX,SANDYBRIDGE
+VCVTPD2DQ xmmreg,mem128 [rm: vex.128.f2.0f e6 /r] AVX,SANDYBRIDGE,SO
+VCVTPD2DQ xmmreg,ymmreg [rm: vex.256.f2.0f e6 /r] AVX,SANDYBRIDGE
+VCVTPD2DQ xmmreg,mem256 [rm: vex.256.f2.0f e6 /r] AVX,SANDYBRIDGE,SY
+VCVTPD2PS xmmreg,xmmreg [rm: vex.128.66.0f 5a /r] AVX,SANDYBRIDGE
+VCVTPD2PS xmmreg,mem128 [rm: vex.128.66.0f 5a /r] AVX,SANDYBRIDGE,SO
+VCVTPD2PS xmmreg,ymmreg [rm: vex.256.66.0f 5a /r] AVX,SANDYBRIDGE
+VCVTPD2PS xmmreg,mem256 [rm: vex.256.66.0f 5a /r] AVX,SANDYBRIDGE,SY
+VCVTPS2DQ xmmreg,xmmrm128 [rm: vex.128.66.0f 5b /r] AVX,SANDYBRIDGE
+VCVTPS2DQ ymmreg,ymmrm256 [rm: vex.256.66.0f 5b /r] AVX,SANDYBRIDGE
+VCVTPS2PD xmmreg,xmmrm64 [rm: vex.128.0f 5a /r] AVX,SANDYBRIDGE
+VCVTPS2PD ymmreg,xmmrm128 [rm: vex.256.0f 5a /r] AVX,SANDYBRIDGE
+VCVTSD2SI reg32,xmmrm64 [rm: vex.lig.f2.0f.w0 2d /r] AVX,SANDYBRIDGE
+VCVTSD2SI reg64,xmmrm64 [rm: vex.lig.f2.0f.w1 2d /r] AVX,SANDYBRIDGE,LONG
+VCVTSD2SS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 5a /r] AVX,SANDYBRIDGE
+VCVTSI2SD xmmreg,xmmreg*,rm32 [rvm: vex.nds.lig.f2.0f.w0 2a /r] AVX,SANDYBRIDGE,SD
+VCVTSI2SD xmmreg,xmmreg*,mem32 [rvm: vex.nds.lig.f2.0f.w0 2a /r] AVX,SANDYBRIDGE,ND,SD
+VCVTSI2SD xmmreg,xmmreg*,rm64 [rvm: vex.nds.lig.f2.0f.w1 2a /r] AVX,SANDYBRIDGE,LONG,SQ
+VCVTSI2SS xmmreg,xmmreg*,rm32 [rvm: vex.nds.lig.f3.0f.w0 2a /r] AVX,SANDYBRIDGE,SD
+VCVTSI2SS xmmreg,xmmreg*,mem32 [rvm: vex.nds.lig.f3.0f.w0 2a /r] AVX,SANDYBRIDGE,ND,SD
+VCVTSI2SS xmmreg,xmmreg*,rm64 [rvm: vex.nds.lig.f3.0f.w1 2a /r] AVX,SANDYBRIDGE,LONG,SQ
+VCVTSS2SD xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 5a /r] AVX,SANDYBRIDGE
+VCVTSS2SI reg32,xmmrm32 [rm: vex.lig.f3.0f.w0 2d /r] AVX,SANDYBRIDGE
+VCVTSS2SI reg64,xmmrm32 [rm: vex.lig.f3.0f.w1 2d /r] AVX,SANDYBRIDGE,LONG
+VCVTTPD2DQ xmmreg,xmmreg [rm: vex.128.66.0f e6 /r] AVX,SANDYBRIDGE
+VCVTTPD2DQ xmmreg,mem128 [rm: vex.128.66.0f e6 /r] AVX,SANDYBRIDGE,SO
+VCVTTPD2DQ xmmreg,ymmreg [rm: vex.256.66.0f e6 /r] AVX,SANDYBRIDGE
+VCVTTPD2DQ xmmreg,mem256 [rm: vex.256.66.0f e6 /r] AVX,SANDYBRIDGE,SY
+VCVTTPS2DQ xmmreg,xmmrm128 [rm: vex.128.f3.0f 5b /r] AVX,SANDYBRIDGE
+VCVTTPS2DQ ymmreg,ymmrm256 [rm: vex.256.f3.0f 5b /r] AVX,SANDYBRIDGE
+VCVTTSD2SI reg32,xmmrm64 [rm: vex.lig.f2.0f.w0 2c /r] AVX,SANDYBRIDGE
+VCVTTSD2SI reg64,xmmrm64 [rm: vex.lig.f2.0f.w1 2c /r] AVX,SANDYBRIDGE,LONG
+VCVTTSS2SI reg32,xmmrm32 [rm: vex.lig.f3.0f.w0 2c /r] AVX,SANDYBRIDGE
+VCVTTSS2SI reg64,xmmrm32 [rm: vex.lig.f3.0f.w1 2c /r] AVX,SANDYBRIDGE,LONG
+VDIVPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 5e /r] AVX,SANDYBRIDGE
+VDIVPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 5e /r] AVX,SANDYBRIDGE
+VDIVPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 5e /r] AVX,SANDYBRIDGE
+VDIVPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 5e /r] AVX,SANDYBRIDGE
+VDIVSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 5e /r] AVX,SANDYBRIDGE
+VDIVSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 5e /r] AVX,SANDYBRIDGE
+VDPPD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 41 /r ib] AVX,SANDYBRIDGE
+VDPPS xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 40 /r ib] AVX,SANDYBRIDGE
+VDPPS ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a 40 /r ib] AVX,SANDYBRIDGE
+VEXTRACTF128 xmmrm128,ymmreg,imm8 [mri: vex.256.66.0f3a.w0 19 /r ib] AVX,SANDYBRIDGE
+VEXTRACTPS rm32,xmmreg,imm8 [mri: vex.128.66.0f3a 17 /r ib] AVX,SANDYBRIDGE
+VHADDPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 7c /r] AVX,SANDYBRIDGE
+VHADDPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 7c /r] AVX,SANDYBRIDGE
+VHADDPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.f2.0f 7c /r] AVX,SANDYBRIDGE
+VHADDPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.f2.0f 7c /r] AVX,SANDYBRIDGE
+VHSUBPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 7d /r] AVX,SANDYBRIDGE
+VHSUBPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 7d /r] AVX,SANDYBRIDGE
+VHSUBPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.f2.0f 7d /r] AVX,SANDYBRIDGE
+VHSUBPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.f2.0f 7d /r] AVX,SANDYBRIDGE
+VINSERTF128 ymmreg,ymmreg,xmmrm128,imm8 [rvmi: vex.nds.256.66.0f3a.w0 18 /r ib] AVX,SANDYBRIDGE
+VINSERTPS xmmreg,xmmreg*,xmmrm32,imm8 [rvmi: vex.nds.128.66.0f3a 21 /r ib] AVX,SANDYBRIDGE
+VLDDQU xmmreg,mem128 [rm: vex.128.f2.0f f0 /r] AVX,SANDYBRIDGE
+VLDQQU ymmreg,mem256 [rm: vex.256.f2.0f f0 /r] AVX,SANDYBRIDGE
+VLDDQU ymmreg,mem256 [rm: vex.256.f2.0f f0 /r] AVX,SANDYBRIDGE
+VLDMXCSR mem32 [m: vex.lz.0f ae /2] AVX,SANDYBRIDGE
+VMASKMOVDQU xmmreg,xmmreg [rm: vex.128.66.0f f7 /r] AVX,SANDYBRIDGE
+VMASKMOVPS xmmreg,xmmreg,mem128 [rvm: vex.nds.128.66.0f38.w0 2c /r] AVX,SANDYBRIDGE
+VMASKMOVPS ymmreg,ymmreg,mem256 [rvm: vex.nds.256.66.0f38.w0 2c /r] AVX,SANDYBRIDGE
+VMASKMOVPS mem128,xmmreg,xmmreg [mvr: vex.nds.128.66.0f38.w0 2e /r] AVX,SANDYBRIDGE,SO
+VMASKMOVPS mem256,ymmreg,ymmreg [mvr: vex.nds.256.66.0f38.w0 2e /r] AVX,SANDYBRIDGE,SY
+VMASKMOVPD xmmreg,xmmreg,mem128 [rvm: vex.nds.128.66.0f38.w0 2d /r] AVX,SANDYBRIDGE
+VMASKMOVPD ymmreg,ymmreg,mem256 [rvm: vex.nds.256.66.0f38.w0 2d /r] AVX,SANDYBRIDGE
+VMASKMOVPD mem128,xmmreg,xmmreg [mvr: vex.nds.128.66.0f38.w0 2f /r] AVX,SANDYBRIDGE
+VMASKMOVPD mem256,ymmreg,ymmreg [mvr: vex.nds.256.66.0f38.w0 2f /r] AVX,SANDYBRIDGE
+VMAXPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 5f /r] AVX,SANDYBRIDGE
+VMAXPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 5f /r] AVX,SANDYBRIDGE
+VMAXPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 5f /r] AVX,SANDYBRIDGE
+VMAXPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 5f /r] AVX,SANDYBRIDGE
+VMAXSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 5f /r] AVX,SANDYBRIDGE
+VMAXSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 5f /r] AVX,SANDYBRIDGE
+VMINPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 5d /r] AVX,SANDYBRIDGE
+VMINPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 5d /r] AVX,SANDYBRIDGE
+VMINPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 5d /r] AVX,SANDYBRIDGE
+VMINPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 5d /r] AVX,SANDYBRIDGE
+VMINSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 5d /r] AVX,SANDYBRIDGE
+VMINSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 5d /r] AVX,SANDYBRIDGE
+VMOVAPD xmmreg,xmmrm128 [rm: vex.128.66.0f 28 /r] AVX,SANDYBRIDGE
+VMOVAPD xmmrm128,xmmreg [mr: vex.128.66.0f 29 /r] AVX,SANDYBRIDGE
+VMOVAPD ymmreg,ymmrm256 [rm: vex.256.66.0f 28 /r] AVX,SANDYBRIDGE
+VMOVAPD ymmrm256,ymmreg [mr: vex.256.66.0f 29 /r] AVX,SANDYBRIDGE
+VMOVAPS xmmreg,xmmrm128 [rm: vex.128.0f 28 /r] AVX,SANDYBRIDGE
+VMOVAPS xmmrm128,xmmreg [mr: vex.128.0f 29 /r] AVX,SANDYBRIDGE
+VMOVAPS ymmreg,ymmrm256 [rm: vex.256.0f 28 /r] AVX,SANDYBRIDGE
+VMOVAPS ymmrm256,ymmreg [mr: vex.256.0f 29 /r] AVX,SANDYBRIDGE
+VMOVD xmmreg,rm32 [rm: vex.128.66.0f.w0 6e /r] AVX,SANDYBRIDGE
+VMOVD rm32,xmmreg [mr: vex.128.66.0f.w0 7e /r] AVX,SANDYBRIDGE
+VMOVQ xmmreg,xmmrm64 [rm: vex.128.f3.0f 7e /r] AVX,SANDYBRIDGE,SQ
+VMOVQ xmmrm64,xmmreg [mr: vex.128.66.0f d6 /r] AVX,SANDYBRIDGE,SQ
+VMOVQ xmmreg,rm64 [rm: vex.128.66.0f.w1 6e /r] AVX,SANDYBRIDGE,LONG,SQ
+VMOVQ rm64,xmmreg [mr: vex.128.66.0f.w1 7e /r] AVX,SANDYBRIDGE,LONG,SQ
+VMOVDDUP xmmreg,xmmrm64 [rm: vex.128.f2.0f 12 /r] AVX,SANDYBRIDGE
+VMOVDDUP ymmreg,ymmrm256 [rm: vex.256.f2.0f 12 /r] AVX,SANDYBRIDGE
+VMOVDQA xmmreg,xmmrm128 [rm: vex.128.66.0f 6f /r] AVX,SANDYBRIDGE
+VMOVDQA xmmrm128,xmmreg [mr: vex.128.66.0f 7f /r] AVX,SANDYBRIDGE
+; These are officially documented as VMOVDQA, but VMOVQQA seems more logical to me...
+VMOVQQA ymmreg,ymmrm256 [rm: vex.256.66.0f 6f /r] AVX,SANDYBRIDGE
+VMOVQQA ymmrm256,ymmreg [mr: vex.256.66.0f 7f /r] AVX,SANDYBRIDGE
+VMOVDQA ymmreg,ymmrm256 [rm: vex.256.66.0f 6f /r] AVX,SANDYBRIDGE
+VMOVDQA ymmrm256,ymmreg [mr: vex.256.66.0f 7f /r] AVX,SANDYBRIDGE
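+; Illustrative example (arbitrary register and address): since both spellings
+; share the vex.256.66.0f 6f /r encoding above, in 64-bit code
+;       vmovdqa ymm0,[rsi]
+; and
+;       vmovqqa ymm0,[rsi]
+; should produce identical machine code.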
+VMOVDQU xmmreg,xmmrm128 [rm: vex.128.f3.0f 6f /r] AVX,SANDYBRIDGE
+VMOVDQU xmmrm128,xmmreg [mr: vex.128.f3.0f 7f /r] AVX,SANDYBRIDGE
+; These are officially documented as VMOVDQU, but VMOVQQU seems more logical to me...
+VMOVQQU ymmreg,ymmrm256 [rm: vex.256.f3.0f 6f /r] AVX,SANDYBRIDGE
+VMOVQQU ymmrm256,ymmreg [mr: vex.256.f3.0f 7f /r] AVX,SANDYBRIDGE
+VMOVDQU ymmreg,ymmrm256 [rm: vex.256.f3.0f 6f /r] AVX,SANDYBRIDGE
+VMOVDQU ymmrm256,ymmreg [mr: vex.256.f3.0f 7f /r] AVX,SANDYBRIDGE
+VMOVHLPS xmmreg,xmmreg*,xmmreg [rvm: vex.nds.128.0f 12 /r] AVX,SANDYBRIDGE
+VMOVHPD xmmreg,xmmreg*,mem64 [rvm: vex.nds.128.66.0f 16 /r] AVX,SANDYBRIDGE
+VMOVHPD mem64,xmmreg [mr: vex.128.66.0f 17 /r] AVX,SANDYBRIDGE
+VMOVHPS xmmreg,xmmreg*,mem64 [rvm: vex.nds.128.0f 16 /r] AVX,SANDYBRIDGE
+VMOVHPS mem64,xmmreg [mr: vex.128.0f 17 /r] AVX,SANDYBRIDGE
+VMOVLHPS xmmreg,xmmreg*,xmmreg [rvm: vex.nds.128.0f 16 /r] AVX,SANDYBRIDGE
+VMOVLPD xmmreg,xmmreg*,mem64 [rvm: vex.nds.128.66.0f 12 /r] AVX,SANDYBRIDGE
+VMOVLPD mem64,xmmreg [mr: vex.128.66.0f 13 /r] AVX,SANDYBRIDGE
+VMOVLPS xmmreg,xmmreg*,mem64 [rvm: vex.nds.128.0f 12 /r] AVX,SANDYBRIDGE
+VMOVLPS mem64,xmmreg [mr: vex.128.0f 13 /r] AVX,SANDYBRIDGE
+VMOVMSKPD reg64,xmmreg [rm: vex.128.66.0f 50 /r] AVX,SANDYBRIDGE,LONG
+VMOVMSKPD reg32,xmmreg [rm: vex.128.66.0f 50 /r] AVX,SANDYBRIDGE
+VMOVMSKPD reg64,ymmreg [rm: vex.256.66.0f 50 /r] AVX,SANDYBRIDGE,LONG
+VMOVMSKPD reg32,ymmreg [rm: vex.256.66.0f 50 /r] AVX,SANDYBRIDGE
+VMOVMSKPS reg64,xmmreg [rm: vex.128.0f 50 /r] AVX,SANDYBRIDGE,LONG
+VMOVMSKPS reg32,xmmreg [rm: vex.128.0f 50 /r] AVX,SANDYBRIDGE
+VMOVMSKPS reg64,ymmreg [rm: vex.256.0f 50 /r] AVX,SANDYBRIDGE,LONG
+VMOVMSKPS reg32,ymmreg [rm: vex.256.0f 50 /r] AVX,SANDYBRIDGE
+VMOVNTDQ mem128,xmmreg [mr: vex.128.66.0f e7 /r] AVX,SANDYBRIDGE
+; Officially VMOVNTDQ, but VMOVNTQQ seems more logical to me...
+VMOVNTQQ mem256,ymmreg [mr: vex.256.66.0f e7 /r] AVX,SANDYBRIDGE
+VMOVNTDQ mem256,ymmreg [mr: vex.256.66.0f e7 /r] AVX,SANDYBRIDGE
+VMOVNTDQA xmmreg,mem128 [rm: vex.128.66.0f38 2a /r] AVX,SANDYBRIDGE
+VMOVNTPD mem128,xmmreg [mr: vex.128.66.0f 2b /r] AVX,SANDYBRIDGE
+VMOVNTPD mem256,ymmreg [mr: vex.256.66.0f 2b /r] AVX,SANDYBRIDGE
+VMOVNTPS mem128,xmmreg [mr: vex.128.0f 2b /r] AVX,SANDYBRIDGE
+VMOVNTPS mem256,ymmreg [mr: vex.256.0f 2b /r] AVX,SANDYBRIDGE
+VMOVSD xmmreg,xmmreg*,xmmreg [rvm: vex.nds.lig.f2.0f 10 /r] AVX,SANDYBRIDGE
+VMOVSD xmmreg,mem64 [rm: vex.lig.f2.0f 10 /r] AVX,SANDYBRIDGE
+VMOVSD xmmreg,xmmreg*,xmmreg [mvr: vex.nds.lig.f2.0f 11 /r] AVX,SANDYBRIDGE
+VMOVSD mem64,xmmreg [mr: vex.lig.f2.0f 11 /r] AVX,SANDYBRIDGE
+VMOVSHDUP xmmreg,xmmrm128 [rm: vex.128.f3.0f 16 /r] AVX,SANDYBRIDGE
+VMOVSHDUP ymmreg,ymmrm256 [rm: vex.256.f3.0f 16 /r] AVX,SANDYBRIDGE
+VMOVSLDUP xmmreg,xmmrm128 [rm: vex.128.f3.0f 12 /r] AVX,SANDYBRIDGE
+VMOVSLDUP ymmreg,ymmrm256 [rm: vex.256.f3.0f 12 /r] AVX,SANDYBRIDGE
+VMOVSS xmmreg,xmmreg*,xmmreg [rvm: vex.nds.lig.f3.0f 10 /r] AVX,SANDYBRIDGE
+VMOVSS xmmreg,mem32 [rm: vex.lig.f3.0f 10 /r] AVX,SANDYBRIDGE
+VMOVSS xmmreg,xmmreg*,xmmreg [mvr: vex.nds.lig.f3.0f 11 /r] AVX,SANDYBRIDGE
+VMOVSS mem32,xmmreg [mr: vex.lig.f3.0f 11 /r] AVX,SANDYBRIDGE
+VMOVUPD xmmreg,xmmrm128 [rm: vex.128.66.0f 10 /r] AVX,SANDYBRIDGE
+VMOVUPD xmmrm128,xmmreg [mr: vex.128.66.0f 11 /r] AVX,SANDYBRIDGE
+VMOVUPD ymmreg,ymmrm256 [rm: vex.256.66.0f 10 /r] AVX,SANDYBRIDGE
+VMOVUPD ymmrm256,ymmreg [mr: vex.256.66.0f 11 /r] AVX,SANDYBRIDGE
+VMOVUPS xmmreg,xmmrm128 [rm: vex.128.0f 10 /r] AVX,SANDYBRIDGE
+VMOVUPS xmmrm128,xmmreg [mr: vex.128.0f 11 /r] AVX,SANDYBRIDGE
+VMOVUPS ymmreg,ymmrm256 [rm: vex.256.0f 10 /r] AVX,SANDYBRIDGE
+VMOVUPS ymmrm256,ymmreg [mr: vex.256.0f 11 /r] AVX,SANDYBRIDGE
+VMPSADBW xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 42 /r ib] AVX,SANDYBRIDGE
+VMULPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 59 /r] AVX,SANDYBRIDGE
+VMULPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 59 /r] AVX,SANDYBRIDGE
+VMULPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 59 /r] AVX,SANDYBRIDGE
+VMULPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 59 /r] AVX,SANDYBRIDGE
+VMULSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 59 /r] AVX,SANDYBRIDGE
+VMULSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 59 /r] AVX,SANDYBRIDGE
+VORPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 56 /r] AVX,SANDYBRIDGE
+VORPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 56 /r] AVX,SANDYBRIDGE
+VORPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 56 /r] AVX,SANDYBRIDGE
+VORPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 56 /r] AVX,SANDYBRIDGE
+VPABSB xmmreg,xmmrm128 [rm: vex.128.66.0f38 1c /r] AVX,SANDYBRIDGE
+VPABSW xmmreg,xmmrm128 [rm: vex.128.66.0f38 1d /r] AVX,SANDYBRIDGE
+VPABSD xmmreg,xmmrm128 [rm: vex.128.66.0f38 1e /r] AVX,SANDYBRIDGE
+VPACKSSWB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 63 /r] AVX,SANDYBRIDGE
+VPACKSSDW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 6b /r] AVX,SANDYBRIDGE
+VPACKUSWB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 67 /r] AVX,SANDYBRIDGE
+VPACKUSDW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 2b /r] AVX,SANDYBRIDGE
+VPADDB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f fc /r] AVX,SANDYBRIDGE
+VPADDW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f fd /r] AVX,SANDYBRIDGE
+VPADDD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f fe /r] AVX,SANDYBRIDGE
+VPADDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d4 /r] AVX,SANDYBRIDGE
+VPADDSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f ec /r] AVX,SANDYBRIDGE
+VPADDSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f ed /r] AVX,SANDYBRIDGE
+VPADDUSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f dc /r] AVX,SANDYBRIDGE
+VPADDUSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f dd /r] AVX,SANDYBRIDGE
+VPALIGNR xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 0f /r ib] AVX,SANDYBRIDGE
+VPAND xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f db /r] AVX,SANDYBRIDGE
+VPANDN xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f df /r] AVX,SANDYBRIDGE
+VPAVGB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e0 /r] AVX,SANDYBRIDGE
+VPAVGW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e3 /r] AVX,SANDYBRIDGE
+VPBLENDVB xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.nds.128.66.0f3a.w0 4c /r /is4] AVX,SANDYBRIDGE
+VPBLENDW xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 0e /r ib] AVX,SANDYBRIDGE
+VPCMPESTRI xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 61 /r ib] AVX,SANDYBRIDGE
+VPCMPESTRM xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 60 /r ib] AVX,SANDYBRIDGE
+VPCMPISTRI xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 63 /r ib] AVX,SANDYBRIDGE
+VPCMPISTRM xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 62 /r ib] AVX,SANDYBRIDGE
+VPCMPEQB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 74 /r] AVX,SANDYBRIDGE
+VPCMPEQW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 75 /r] AVX,SANDYBRIDGE
+VPCMPEQD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 76 /r] AVX,SANDYBRIDGE
+VPCMPEQQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 29 /r] AVX,SANDYBRIDGE
+VPCMPGTB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 64 /r] AVX,SANDYBRIDGE
+VPCMPGTW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 65 /r] AVX,SANDYBRIDGE
+VPCMPGTD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 66 /r] AVX,SANDYBRIDGE
+VPCMPGTQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 37 /r] AVX,SANDYBRIDGE
+VPERMILPD xmmreg,xmmreg,xmmrm128 [rvm: vex.nds.128.66.0f38.w0 0d /r] AVX,SANDYBRIDGE
+VPERMILPD ymmreg,ymmreg,ymmrm256 [rvm: vex.nds.256.66.0f38.w0 0d /r] AVX,SANDYBRIDGE
+VPERMILPD xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a.w0 05 /r ib] AVX,SANDYBRIDGE
+VPERMILPD ymmreg,ymmrm256,imm8 [rmi: vex.256.66.0f3a.w0 05 /r ib] AVX,SANDYBRIDGE
+VPERMILPS xmmreg,xmmreg,xmmrm128 [rvm: vex.nds.128.66.0f38.w0 0c /r] AVX,SANDYBRIDGE
+VPERMILPS ymmreg,ymmreg,ymmrm256 [rvm: vex.nds.256.66.0f38.w0 0c /r] AVX,SANDYBRIDGE
+VPERMILPS xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a.w0 04 /r ib] AVX,SANDYBRIDGE
+VPERMILPS ymmreg,ymmrm256,imm8 [rmi: vex.256.66.0f3a.w0 04 /r ib] AVX,SANDYBRIDGE
+VPERM2F128 ymmreg,ymmreg,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a.w0 06 /r ib] AVX,SANDYBRIDGE
+VPEXTRB reg64,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 14 /r ib] AVX,SANDYBRIDGE,LONG
+VPEXTRB reg32,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 14 /r ib] AVX,SANDYBRIDGE
+VPEXTRB mem8,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 14 /r ib] AVX,SANDYBRIDGE
+VPEXTRW reg64,xmmreg,imm8 [rmi: vex.128.66.0f.w0 c5 /r ib] AVX,SANDYBRIDGE,LONG
+VPEXTRW reg32,xmmreg,imm8 [rmi: vex.128.66.0f.w0 c5 /r ib] AVX,SANDYBRIDGE
+VPEXTRW reg64,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 15 /r ib] AVX,SANDYBRIDGE,LONG
+VPEXTRW reg32,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 15 /r ib] AVX,SANDYBRIDGE
+VPEXTRW mem16,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 15 /r ib] AVX,SANDYBRIDGE
+VPEXTRD reg64,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 16 /r ib] AVX,SANDYBRIDGE,LONG
+VPEXTRD rm32,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 16 /r ib] AVX,SANDYBRIDGE
+VPEXTRQ rm64,xmmreg,imm8 [mri: vex.128.66.0f3a.w1 16 /r ib] AVX,SANDYBRIDGE,LONG
+VPHADDW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 01 /r] AVX,SANDYBRIDGE
+VPHADDD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 02 /r] AVX,SANDYBRIDGE
+VPHADDSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 03 /r] AVX,SANDYBRIDGE
+VPHMINPOSUW xmmreg,xmmrm128 [rm: vex.128.66.0f38 41 /r] AVX,SANDYBRIDGE
+VPHSUBW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 05 /r] AVX,SANDYBRIDGE
+VPHSUBD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 06 /r] AVX,SANDYBRIDGE
+VPHSUBSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 07 /r] AVX,SANDYBRIDGE
+VPINSRB xmmreg,xmmreg*,mem8,imm8 [rvmi: vex.nds.128.66.0f3a 20 /r ib] AVX,SANDYBRIDGE
+VPINSRB xmmreg,xmmreg*,rm8,imm8 [rvmi: vex.nds.128.66.0f3a 20 /r ib] AVX,SANDYBRIDGE
+VPINSRB xmmreg,xmmreg*,reg32,imm8 [rvmi: vex.nds.128.66.0f3a 20 /r ib] AVX,SANDYBRIDGE
+VPINSRW xmmreg,xmmreg*,mem16,imm8 [rvmi: vex.nds.128.66.0f c4 /r ib] AVX,SANDYBRIDGE
+VPINSRW xmmreg,xmmreg*,rm16,imm8 [rvmi: vex.nds.128.66.0f c4 /r ib] AVX,SANDYBRIDGE
+VPINSRW xmmreg,xmmreg*,reg32,imm8 [rvmi: vex.nds.128.66.0f c4 /r ib] AVX,SANDYBRIDGE
+VPINSRD xmmreg,xmmreg*,mem32,imm8 [rvmi: vex.nds.128.66.0f3a.w0 22 /r ib] AVX,SANDYBRIDGE
+VPINSRD xmmreg,xmmreg*,rm32,imm8 [rvmi: vex.nds.128.66.0f3a.w0 22 /r ib] AVX,SANDYBRIDGE
+VPINSRQ xmmreg,xmmreg*,mem64,imm8 [rvmi: vex.nds.128.66.0f3a.w1 22 /r ib] AVX,SANDYBRIDGE,LONG
+VPINSRQ xmmreg,xmmreg*,rm64,imm8 [rvmi: vex.nds.128.66.0f3a.w1 22 /r ib] AVX,SANDYBRIDGE,LONG
+VPMADDWD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f5 /r] AVX,SANDYBRIDGE
+VPMADDUBSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 04 /r] AVX,SANDYBRIDGE
+VPMAXSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3c /r] AVX,SANDYBRIDGE
+VPMAXSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f ee /r] AVX,SANDYBRIDGE
+VPMAXSD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3d /r] AVX,SANDYBRIDGE
+VPMAXUB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f de /r] AVX,SANDYBRIDGE
+VPMAXUW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3e /r] AVX,SANDYBRIDGE
+VPMAXUD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3f /r] AVX,SANDYBRIDGE
+VPMINSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 38 /r] AVX,SANDYBRIDGE
+VPMINSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f ea /r] AVX,SANDYBRIDGE
+VPMINSD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 39 /r] AVX,SANDYBRIDGE
+VPMINUB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f da /r] AVX,SANDYBRIDGE
+VPMINUW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3a /r] AVX,SANDYBRIDGE
+VPMINUD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3b /r] AVX,SANDYBRIDGE
+VPMOVMSKB reg64,xmmreg [rm: vex.128.66.0f d7 /r] AVX,SANDYBRIDGE,LONG
+VPMOVMSKB reg32,xmmreg [rm: vex.128.66.0f d7 /r] AVX,SANDYBRIDGE
+VPMOVSXBW xmmreg,xmmrm64 [rm: vex.128.66.0f38 20 /r] AVX,SANDYBRIDGE
+VPMOVSXBD xmmreg,xmmrm32 [rm: vex.128.66.0f38 21 /r] AVX,SANDYBRIDGE
+VPMOVSXBQ xmmreg,xmmrm16 [rm: vex.128.66.0f38 22 /r] AVX,SANDYBRIDGE
+VPMOVSXWD xmmreg,xmmrm64 [rm: vex.128.66.0f38 23 /r] AVX,SANDYBRIDGE
+VPMOVSXWQ xmmreg,xmmrm32 [rm: vex.128.66.0f38 24 /r] AVX,SANDYBRIDGE
+VPMOVSXDQ xmmreg,xmmrm64 [rm: vex.128.66.0f38 25 /r] AVX,SANDYBRIDGE
+VPMOVZXBW xmmreg,xmmrm64 [rm: vex.128.66.0f38 30 /r] AVX,SANDYBRIDGE
+VPMOVZXBD xmmreg,xmmrm32 [rm: vex.128.66.0f38 31 /r] AVX,SANDYBRIDGE
+VPMOVZXBQ xmmreg,xmmrm16 [rm: vex.128.66.0f38 32 /r] AVX,SANDYBRIDGE
+VPMOVZXWD xmmreg,xmmrm64 [rm: vex.128.66.0f38 33 /r] AVX,SANDYBRIDGE
+VPMOVZXWQ xmmreg,xmmrm32 [rm: vex.128.66.0f38 34 /r] AVX,SANDYBRIDGE
+VPMOVZXDQ xmmreg,xmmrm64 [rm: vex.128.66.0f38 35 /r] AVX,SANDYBRIDGE
+VPMULHUW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e4 /r] AVX,SANDYBRIDGE
+VPMULHRSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 0b /r] AVX,SANDYBRIDGE
+VPMULHW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e5 /r] AVX,SANDYBRIDGE
+VPMULLW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d5 /r] AVX,SANDYBRIDGE
+VPMULLD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 40 /r] AVX,SANDYBRIDGE
+VPMULUDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f4 /r] AVX,SANDYBRIDGE
+VPMULDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 28 /r] AVX,SANDYBRIDGE
+VPOR xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f eb /r] AVX,SANDYBRIDGE
+VPSADBW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f6 /r] AVX,SANDYBRIDGE
+VPSHUFB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 00 /r] AVX,SANDYBRIDGE
+VPSHUFD xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f 70 /r ib] AVX,SANDYBRIDGE
+VPSHUFHW xmmreg,xmmrm128,imm8 [rmi: vex.128.f3.0f 70 /r ib] AVX,SANDYBRIDGE
+VPSHUFLW xmmreg,xmmrm128,imm8 [rmi: vex.128.f2.0f 70 /r ib] AVX,SANDYBRIDGE
+VPSIGNB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 08 /r] AVX,SANDYBRIDGE
+VPSIGNW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 09 /r] AVX,SANDYBRIDGE
+VPSIGND xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 0a /r] AVX,SANDYBRIDGE
+VPSLLDQ xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 73 /7 ib] AVX,SANDYBRIDGE
+VPSRLDQ xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 73 /3 ib] AVX,SANDYBRIDGE
+VPSLLW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f1 /r] AVX,SANDYBRIDGE
+VPSLLW xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 71 /6 ib] AVX,SANDYBRIDGE
+VPSLLD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f2 /r] AVX,SANDYBRIDGE
+VPSLLD xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 72 /6 ib] AVX,SANDYBRIDGE
+VPSLLQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f3 /r] AVX,SANDYBRIDGE
+VPSLLQ xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 73 /6 ib] AVX,SANDYBRIDGE
+VPSRAW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e1 /r] AVX,SANDYBRIDGE
+VPSRAW xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 71 /4 ib] AVX,SANDYBRIDGE
+VPSRAD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e2 /r] AVX,SANDYBRIDGE
+VPSRAD xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 72 /4 ib] AVX,SANDYBRIDGE
+VPSRLW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d1 /r] AVX,SANDYBRIDGE
+VPSRLW xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 71 /2 ib] AVX,SANDYBRIDGE
+VPSRLD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d2 /r] AVX,SANDYBRIDGE
+VPSRLD xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 72 /2 ib] AVX,SANDYBRIDGE
+VPSRLQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d3 /r] AVX,SANDYBRIDGE
+VPSRLQ xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 73 /2 ib] AVX,SANDYBRIDGE
+VPTEST xmmreg,xmmrm128 [rm: vex.128.66.0f38 17 /r] AVX,SANDYBRIDGE
+VPTEST ymmreg,ymmrm256 [rm: vex.256.66.0f38 17 /r] AVX,SANDYBRIDGE
+VPSUBB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f8 /r] AVX,SANDYBRIDGE
+VPSUBW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f9 /r] AVX,SANDYBRIDGE
+VPSUBD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f fa /r] AVX,SANDYBRIDGE
+VPSUBQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f fb /r] AVX,SANDYBRIDGE
+VPSUBSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e8 /r] AVX,SANDYBRIDGE
+VPSUBSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e9 /r] AVX,SANDYBRIDGE
+VPSUBUSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d8 /r] AVX,SANDYBRIDGE
+VPSUBUSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d9 /r] AVX,SANDYBRIDGE
+VPUNPCKHBW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 68 /r] AVX,SANDYBRIDGE
+VPUNPCKHWD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 69 /r] AVX,SANDYBRIDGE
+VPUNPCKHDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 6a /r] AVX,SANDYBRIDGE
+VPUNPCKHQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 6d /r] AVX,SANDYBRIDGE
+VPUNPCKLBW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 60 /r] AVX,SANDYBRIDGE
+VPUNPCKLWD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 61 /r] AVX,SANDYBRIDGE
+VPUNPCKLDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 62 /r] AVX,SANDYBRIDGE
+VPUNPCKLQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 6c /r] AVX,SANDYBRIDGE
+VPXOR xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f ef /r] AVX,SANDYBRIDGE
+VRCPPS xmmreg,xmmrm128 [rm: vex.128.0f 53 /r] AVX,SANDYBRIDGE
+VRCPPS ymmreg,ymmrm256 [rm: vex.256.0f 53 /r] AVX,SANDYBRIDGE
+VRCPSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 53 /r] AVX,SANDYBRIDGE
+VRSQRTPS xmmreg,xmmrm128 [rm: vex.128.0f 52 /r] AVX,SANDYBRIDGE
+VRSQRTPS ymmreg,ymmrm256 [rm: vex.256.0f 52 /r] AVX,SANDYBRIDGE
+VRSQRTSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 52 /r] AVX,SANDYBRIDGE
+VROUNDPD xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 09 /r ib] AVX,SANDYBRIDGE
+VROUNDPD ymmreg,ymmrm256,imm8 [rmi: vex.256.66.0f3a 09 /r ib] AVX,SANDYBRIDGE
+VROUNDPS xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 08 /r ib] AVX,SANDYBRIDGE
+VROUNDPS ymmreg,ymmrm256,imm8 [rmi: vex.256.66.0f3a 08 /r ib] AVX,SANDYBRIDGE
+VROUNDSD xmmreg,xmmreg*,xmmrm64,imm8 [rvmi: vex.nds.128.66.0f3a 0b /r ib] AVX,SANDYBRIDGE
+VROUNDSS xmmreg,xmmreg*,xmmrm32,imm8 [rvmi: vex.nds.128.66.0f3a 0a /r ib] AVX,SANDYBRIDGE
+VSHUFPD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f c6 /r ib] AVX,SANDYBRIDGE
+VSHUFPD ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f c6 /r ib] AVX,SANDYBRIDGE
+VSHUFPS xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.0f c6 /r ib] AVX,SANDYBRIDGE
+VSHUFPS ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.0f c6 /r ib] AVX,SANDYBRIDGE
+VSQRTPD xmmreg,xmmrm128 [rm: vex.128.66.0f 51 /r] AVX,SANDYBRIDGE
+VSQRTPD ymmreg,ymmrm256 [rm: vex.256.66.0f 51 /r] AVX,SANDYBRIDGE
+VSQRTPS xmmreg,xmmrm128 [rm: vex.128.0f 51 /r] AVX,SANDYBRIDGE
+VSQRTPS ymmreg,ymmrm256 [rm: vex.256.0f 51 /r] AVX,SANDYBRIDGE
+VSQRTSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 51 /r] AVX,SANDYBRIDGE
+VSQRTSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 51 /r] AVX,SANDYBRIDGE
+VSTMXCSR mem32 [m: vex.128.0f ae /3] AVX,SANDYBRIDGE
+VSUBPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 5c /r] AVX,SANDYBRIDGE
+VSUBPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 5c /r] AVX,SANDYBRIDGE
+VSUBPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 5c /r] AVX,SANDYBRIDGE
+VSUBPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 5c /r] AVX,SANDYBRIDGE
+VSUBSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 5c /r] AVX,SANDYBRIDGE
+VSUBSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 5c /r] AVX,SANDYBRIDGE
+VTESTPS xmmreg,xmmrm128 [rm: vex.128.66.0f38.w0 0e /r] AVX,SANDYBRIDGE
+VTESTPS ymmreg,ymmrm256 [rm: vex.256.66.0f38.w0 0e /r] AVX,SANDYBRIDGE
+VTESTPD xmmreg,xmmrm128 [rm: vex.128.66.0f38.w0 0f /r] AVX,SANDYBRIDGE
+VTESTPD ymmreg,ymmrm256 [rm: vex.256.66.0f38.w0 0f /r] AVX,SANDYBRIDGE
+VUCOMISD xmmreg,xmmrm64 [rm: vex.lig.66.0f 2e /r] AVX,SANDYBRIDGE
+VUCOMISS xmmreg,xmmrm32 [rm: vex.lig.0f 2e /r] AVX,SANDYBRIDGE
+VUNPCKHPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 15 /r] AVX,SANDYBRIDGE
+VUNPCKHPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 15 /r] AVX,SANDYBRIDGE
+VUNPCKHPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 15 /r] AVX,SANDYBRIDGE
+VUNPCKHPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 15 /r] AVX,SANDYBRIDGE
+VUNPCKLPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 14 /r] AVX,SANDYBRIDGE
+VUNPCKLPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 14 /r] AVX,SANDYBRIDGE
+VUNPCKLPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 14 /r] AVX,SANDYBRIDGE
+VUNPCKLPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 14 /r] AVX,SANDYBRIDGE
+VXORPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 57 /r] AVX,SANDYBRIDGE
+VXORPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 57 /r] AVX,SANDYBRIDGE
+VXORPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 57 /r] AVX,SANDYBRIDGE
+VXORPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 57 /r] AVX,SANDYBRIDGE
+VZEROALL void [ vex.256.0f.w0 77] AVX,SANDYBRIDGE
+VZEROUPPER void [ vex.128.0f.w0 77] AVX,SANDYBRIDGE
+
+;# Intel Carry-Less Multiplication instructions (CLMUL)
+PCLMULLQLQDQ xmmreg,xmmrm128 [rm: 66 0f 3a 44 /r 00] SSE,WESTMERE
+PCLMULHQLQDQ xmmreg,xmmrm128 [rm: 66 0f 3a 44 /r 01] SSE,WESTMERE
+PCLMULLQHQDQ xmmreg,xmmrm128 [rm: 66 0f 3a 44 /r 10] SSE,WESTMERE
+PCLMULHQHQDQ xmmreg,xmmrm128 [rm: 66 0f 3a 44 /r 11] SSE,WESTMERE
+PCLMULQDQ xmmreg,xmmrm128,imm8 [rmi: 66 0f 3a 44 /r ib] SSE,WESTMERE
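+; Illustrative example (arbitrary registers): the four pseudo-ops above merely
+; fix the immediate byte of PCLMULQDQ, so
+;       pclmullqhqdq xmm1,xmm2
+; should assemble to the same bytes as
+;       pclmulqdq xmm1,xmm2,0x10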
+
+;# Intel AVX Carry-Less Multiplication instructions (CLMUL)
+VPCLMULLQLQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f3a 44 /r 00] AVX,SANDYBRIDGE
+VPCLMULHQLQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f3a 44 /r 01] AVX,SANDYBRIDGE
+VPCLMULLQHQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f3a 44 /r 10] AVX,SANDYBRIDGE
+VPCLMULHQHQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f3a 44 /r 11] AVX,SANDYBRIDGE
+VPCLMULQDQ xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 44 /r ib] AVX,SANDYBRIDGE
+
+;# Intel Fused Multiply-Add instructions (FMA)
+VFMADD132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 98 /r] FMA,FUTURE
+VFMADD132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 98 /r] FMA,FUTURE
+VFMADD132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 98 /r] FMA,FUTURE
+VFMADD132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 98 /r] FMA,FUTURE
+VFMADD312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 98 /r] FMA,FUTURE
+VFMADD312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 98 /r] FMA,FUTURE
+VFMADD312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 98 /r] FMA,FUTURE
+VFMADD312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 98 /r] FMA,FUTURE
+VFMADD213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a8 /r] FMA,FUTURE
+VFMADD213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a8 /r] FMA,FUTURE
+VFMADD213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a8 /r] FMA,FUTURE
+VFMADD213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a8 /r] FMA,FUTURE
+VFMADD123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a8 /r] FMA,FUTURE
+VFMADD123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a8 /r] FMA,FUTURE
+VFMADD123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a8 /r] FMA,FUTURE
+VFMADD123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a8 /r] FMA,FUTURE
+VFMADD231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b8 /r] FMA,FUTURE
+VFMADD231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b8 /r] FMA,FUTURE
+VFMADD231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b8 /r] FMA,FUTURE
+VFMADD231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b8 /r] FMA,FUTURE
+VFMADD321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b8 /r] FMA,FUTURE
+VFMADD321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b8 /r] FMA,FUTURE
+VFMADD321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b8 /r] FMA,FUTURE
+VFMADD321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b8 /r] FMA,FUTURE
+VFMADDSUB132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 96 /r] FMA,FUTURE
+VFMADDSUB132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 96 /r] FMA,FUTURE
+VFMADDSUB132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 96 /r] FMA,FUTURE
+VFMADDSUB132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 96 /r] FMA,FUTURE
+VFMADDSUB312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 96 /r] FMA,FUTURE
+VFMADDSUB312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 96 /r] FMA,FUTURE
+VFMADDSUB312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 96 /r] FMA,FUTURE
+VFMADDSUB312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 96 /r] FMA,FUTURE
+VFMADDSUB213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a6 /r] FMA,FUTURE
+VFMADDSUB213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a6 /r] FMA,FUTURE
+VFMADDSUB213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a6 /r] FMA,FUTURE
+VFMADDSUB213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a6 /r] FMA,FUTURE
+VFMADDSUB123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a6 /r] FMA,FUTURE
+VFMADDSUB123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a6 /r] FMA,FUTURE
+VFMADDSUB123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a6 /r] FMA,FUTURE
+VFMADDSUB123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a6 /r] FMA,FUTURE
+VFMADDSUB231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b6 /r] FMA,FUTURE
+VFMADDSUB231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b6 /r] FMA,FUTURE
+VFMADDSUB231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b6 /r] FMA,FUTURE
+VFMADDSUB231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b6 /r] FMA,FUTURE
+VFMADDSUB321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b6 /r] FMA,FUTURE
+VFMADDSUB321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b6 /r] FMA,FUTURE
+VFMADDSUB321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b6 /r] FMA,FUTURE
+VFMADDSUB321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b6 /r] FMA,FUTURE
+VFMSUB132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9a /r] FMA,FUTURE
+VFMSUB132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9a /r] FMA,FUTURE
+VFMSUB132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9a /r] FMA,FUTURE
+VFMSUB132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9a /r] FMA,FUTURE
+VFMSUB312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9a /r] FMA,FUTURE
+VFMSUB312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9a /r] FMA,FUTURE
+VFMSUB312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9a /r] FMA,FUTURE
+VFMSUB312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9a /r] FMA,FUTURE
+VFMSUB213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 aa /r] FMA,FUTURE
+VFMSUB213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 aa /r] FMA,FUTURE
+VFMSUB213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 aa /r] FMA,FUTURE
+VFMSUB213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 aa /r] FMA,FUTURE
+VFMSUB123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 aa /r] FMA,FUTURE
+VFMSUB123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 aa /r] FMA,FUTURE
+VFMSUB123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 aa /r] FMA,FUTURE
+VFMSUB123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 aa /r] FMA,FUTURE
+VFMSUB231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ba /r] FMA,FUTURE
+VFMSUB231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ba /r] FMA,FUTURE
+VFMSUB231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ba /r] FMA,FUTURE
+VFMSUB231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ba /r] FMA,FUTURE
+VFMSUB321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ba /r] FMA,FUTURE
+VFMSUB321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ba /r] FMA,FUTURE
+VFMSUB321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ba /r] FMA,FUTURE
+VFMSUB321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ba /r] FMA,FUTURE
+VFMSUBADD132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 97 /r] FMA,FUTURE
+VFMSUBADD132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 97 /r] FMA,FUTURE
+VFMSUBADD132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 97 /r] FMA,FUTURE
+VFMSUBADD132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 97 /r] FMA,FUTURE
+VFMSUBADD312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 97 /r] FMA,FUTURE
+VFMSUBADD312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 97 /r] FMA,FUTURE
+VFMSUBADD312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 97 /r] FMA,FUTURE
+VFMSUBADD312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 97 /r] FMA,FUTURE
+VFMSUBADD213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a7 /r] FMA,FUTURE
+VFMSUBADD213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a7 /r] FMA,FUTURE
+VFMSUBADD213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a7 /r] FMA,FUTURE
+VFMSUBADD213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a7 /r] FMA,FUTURE
+VFMSUBADD123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a7 /r] FMA,FUTURE
+VFMSUBADD123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a7 /r] FMA,FUTURE
+VFMSUBADD123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a7 /r] FMA,FUTURE
+VFMSUBADD123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a7 /r] FMA,FUTURE
+VFMSUBADD231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b7 /r] FMA,FUTURE
+VFMSUBADD231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b7 /r] FMA,FUTURE
+VFMSUBADD231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b7 /r] FMA,FUTURE
+VFMSUBADD231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b7 /r] FMA,FUTURE
+VFMSUBADD321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b7 /r] FMA,FUTURE
+VFMSUBADD321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b7 /r] FMA,FUTURE
+VFMSUBADD321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b7 /r] FMA,FUTURE
+VFMSUBADD321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b7 /r] FMA,FUTURE
+VFNMADD132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9c /r] FMA,FUTURE
+VFNMADD132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9c /r] FMA,FUTURE
+VFNMADD132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9c /r] FMA,FUTURE
+VFNMADD132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9c /r] FMA,FUTURE
+VFNMADD312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9c /r] FMA,FUTURE
+VFNMADD312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9c /r] FMA,FUTURE
+VFNMADD312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9c /r] FMA,FUTURE
+VFNMADD312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9c /r] FMA,FUTURE
+VFNMADD213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ac /r] FMA,FUTURE
+VFNMADD213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ac /r] FMA,FUTURE
+VFNMADD213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ac /r] FMA,FUTURE
+VFNMADD213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ac /r] FMA,FUTURE
+VFNMADD123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ac /r] FMA,FUTURE
+VFNMADD123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ac /r] FMA,FUTURE
+VFNMADD123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ac /r] FMA,FUTURE
+VFNMADD123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ac /r] FMA,FUTURE
+VFNMADD231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 bc /r] FMA,FUTURE
+VFNMADD231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 bc /r] FMA,FUTURE
+VFNMADD231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 bc /r] FMA,FUTURE
+VFNMADD231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 bc /r] FMA,FUTURE
+VFNMADD321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 bc /r] FMA,FUTURE
+VFNMADD321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 bc /r] FMA,FUTURE
+VFNMADD321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 bc /r] FMA,FUTURE
+VFNMADD321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 bc /r] FMA,FUTURE
+VFNMSUB132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9e /r] FMA,FUTURE
+VFNMSUB132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9e /r] FMA,FUTURE
+VFNMSUB132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9e /r] FMA,FUTURE
+VFNMSUB132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9e /r] FMA,FUTURE
+VFNMSUB312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9e /r] FMA,FUTURE
+VFNMSUB312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9e /r] FMA,FUTURE
+VFNMSUB312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9e /r] FMA,FUTURE
+VFNMSUB312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9e /r] FMA,FUTURE
+VFNMSUB213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ae /r] FMA,FUTURE
+VFNMSUB213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ae /r] FMA,FUTURE
+VFNMSUB213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ae /r] FMA,FUTURE
+VFNMSUB213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ae /r] FMA,FUTURE
+VFNMSUB123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ae /r] FMA,FUTURE
+VFNMSUB123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ae /r] FMA,FUTURE
+VFNMSUB123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ae /r] FMA,FUTURE
+VFNMSUB123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ae /r] FMA,FUTURE
+VFNMSUB231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 be /r] FMA,FUTURE
+VFNMSUB231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 be /r] FMA,FUTURE
+VFNMSUB231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 be /r] FMA,FUTURE
+VFNMSUB231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 be /r] FMA,FUTURE
+VFNMSUB321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 be /r] FMA,FUTURE
+VFNMSUB321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 be /r] FMA,FUTURE
+VFNMSUB321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 be /r] FMA,FUTURE
+VFNMSUB321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 be /r] FMA,FUTURE
+VFMADD132SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 99 /r] FMA,FUTURE
+VFMADD132SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 99 /r] FMA,FUTURE
+VFMADD312SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 99 /r] FMA,FUTURE
+VFMADD312SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 99 /r] FMA,FUTURE
+VFMADD213SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 a9 /r] FMA,FUTURE
+VFMADD213SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 a9 /r] FMA,FUTURE
+VFMADD123SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 a9 /r] FMA,FUTURE
+VFMADD123SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 a9 /r] FMA,FUTURE
+VFMADD231SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 b9 /r] FMA,FUTURE
+VFMADD231SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 b9 /r] FMA,FUTURE
+VFMADD321SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 b9 /r] FMA,FUTURE
+VFMADD321SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 b9 /r] FMA,FUTURE
+VFMSUB132SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9b /r] FMA,FUTURE
+VFMSUB132SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9b /r] FMA,FUTURE
+VFMSUB312SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9b /r] FMA,FUTURE
+VFMSUB312SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9b /r] FMA,FUTURE
+VFMSUB213SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 ab /r] FMA,FUTURE
+VFMSUB213SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 ab /r] FMA,FUTURE
+VFMSUB123SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 ab /r] FMA,FUTURE
+VFMSUB123SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 ab /r] FMA,FUTURE
+VFMSUB231SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bb /r] FMA,FUTURE
+VFMSUB231SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bb /r] FMA,FUTURE
+VFMSUB321SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bb /r] FMA,FUTURE
+VFMSUB321SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bb /r] FMA,FUTURE
+VFNMADD132SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9d /r] FMA,FUTURE
+VFNMADD132SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9d /r] FMA,FUTURE
+VFNMADD312SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9d /r] FMA,FUTURE
+VFNMADD312SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9d /r] FMA,FUTURE
+VFNMADD213SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 ad /r] FMA,FUTURE
+VFNMADD213SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 ad /r] FMA,FUTURE
+VFNMADD123SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 ad /r] FMA,FUTURE
+VFNMADD123SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 ad /r] FMA,FUTURE
+VFNMADD231SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bd /r] FMA,FUTURE
+VFNMADD231SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bd /r] FMA,FUTURE
+VFNMADD321SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bd /r] FMA,FUTURE
+VFNMADD321SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bd /r] FMA,FUTURE
+VFNMSUB132SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9f /r] FMA,FUTURE
+VFNMSUB132SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9f /r] FMA,FUTURE
+VFNMSUB312SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9f /r] FMA,FUTURE
+VFNMSUB312SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9f /r] FMA,FUTURE
+VFNMSUB213SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 af /r] FMA,FUTURE
+VFNMSUB213SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 af /r] FMA,FUTURE
+VFNMSUB123SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 af /r] FMA,FUTURE
+VFNMSUB123SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 af /r] FMA,FUTURE
+VFNMSUB231SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bf /r] FMA,FUTURE
+VFNMSUB231SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bf /r] FMA,FUTURE
+VFNMSUB321SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bf /r] FMA,FUTURE
+VFNMSUB321SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bf /r] FMA,FUTURE
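+; Illustrative note: the 312/123/321 mnemonics above reuse the encodings of
+; the corresponding 132/213/231 forms, so, for example,
+;       vfmadd312ps xmm0,xmm1,xmm2
+; should assemble to the same bytes as
+;       vfmadd132ps xmm0,xmm1,xmm2
+; (both are listed with vex.dds.128.66.0f38.w0 98 /r).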
+
+;# Intel post-32 nm processor instructions
+;
+; Per AVX spec revision 7, document 319433-007
+RDFSBASE reg32 [m: f3 0f ae /0] LONG,FUTURE
+RDFSBASE reg64 [m: o64 f3 0f ae /0] LONG,FUTURE
+RDGSBASE reg32 [m: f3 0f ae /1] LONG,FUTURE
+RDGSBASE reg64 [m: o64 f3 0f ae /1] LONG,FUTURE
+RDRAND reg16 [m: o16 0f c7 /6] FUTURE
+RDRAND reg32 [m: o32 0f c7 /6] FUTURE
+RDRAND reg64 [m: o64 0f c7 /6] LONG,FUTURE
+WRFSBASE reg32 [m: f3 0f ae /2] LONG,FUTURE
+WRFSBASE reg64 [m: o64 f3 0f ae /2] LONG,FUTURE
+WRGSBASE reg32 [m: f3 0f ae /3] LONG,FUTURE
+WRGSBASE reg64 [m: o64 f3 0f ae /3] LONG,FUTURE
+VCVTPH2PS ymmreg,xmmrm128 [rm: vex.256.66.0f38.w0 13 /r] AVX,FUTURE
+VCVTPH2PS xmmreg,xmmrm64 [rm: vex.128.66.0f38.w0 13 /r] AVX,FUTURE
+VCVTPS2PH xmmrm128,ymmreg,imm8 [mri: vex.256.66.0f3a.w0 1d /r ib] AVX,FUTURE
+VCVTPS2PH xmmrm64,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 1d /r ib] AVX,FUTURE
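+; Illustrative usage sketch for the RDRAND entries above (register arbitrary):
+; RDRAND reports success in the carry flag, so callers normally retry while
+; CF is clear, e.g.
+; retry:
+;       rdrand eax
+;       jnc retry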
+
+;# VIA (Centaur) security instructions
+XSTORE void \3\x0F\xA7\xC0 PENT,CYRIX
+XCRYPTECB void \336\3\x0F\xA7\xC8 PENT,CYRIX
+XCRYPTCBC void \336\3\x0F\xA7\xD0 PENT,CYRIX
+XCRYPTCTR void \336\3\x0F\xA7\xD8 PENT,CYRIX
+XCRYPTCFB void \336\3\x0F\xA7\xE0 PENT,CYRIX
+XCRYPTOFB void \336\3\x0F\xA7\xE8 PENT,CYRIX
+MONTMUL void \336\3\x0F\xA6\xC0 PENT,CYRIX
+XSHA1 void \336\3\x0F\xA6\xC8 PENT,CYRIX
+XSHA256 void \336\3\x0F\xA6\xD0 PENT,CYRIX
+
+;# AMD Lightweight Profiling (LWP) instructions
+;
+; based on pub number 43724 revision 3.04 dated August 2009
+;
+; updated to match draft from AMD developer (patch has been
+; sent to binutils)
+; 2010-03-22 Quentin Neill <quentin.neill@amd.com>
+; Sebastian Pop <sebastian.pop@amd.com>
+;
+LLWPCB reg32 [m: xop.m9.w0.l0.p0 12 /0] AMD,386
+LLWPCB reg64 [m: xop.m9.w1.l0.p0 12 /0] AMD,X64
+
+SLWPCB reg32 [m: xop.m9.w0.l0.p0 12 /1] AMD,386
+SLWPCB reg64 [m: xop.m9.w1.l0.p0 12 /1] AMD,X64
+
+LWPVAL reg32,rm32,imm32 [vmi: xop.m10.w0.ndd.l0.p0 12 /1 id] AMD,386
+LWPVAL reg64,rm32,imm32 [vmi: xop.m10.w1.ndd.l0.p0 12 /1 id] AMD,X64
+
+LWPINS reg32,rm32,imm32 [vmi: xop.m10.w0.ndd.l0.p0 12 /0 id] AMD,386
+LWPINS reg64,rm32,imm32 [vmi: xop.m10.w1.ndd.l0.p0 12 /0 id] AMD,X64
+
+;# AMD XOP and FMA4 instructions (SSE5)
+;
+; based on pub number 43479 revision 3.04 dated November 2009
+;
+VFMADDPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 69 /r /is4] AMD,SSE5
+VFMADDPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 69 /r /is4] AMD,SSE5
+VFMADDPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 69 /r /is4] AMD,SSE5
+VFMADDPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 69 /r /is4] AMD,SSE5
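+; Illustrative note (arbitrary operands): each FMA4 instruction is listed in
+; two forms because VEX.W selects whether the third or the fourth source may
+; be memory, so both of the following should be accepted:
+;       vfmaddpd xmm0,xmm1,[rdi],xmm2   ; w0 form, memory as third source
+;       vfmaddpd xmm0,xmm1,xmm2,[rdi]   ; w1 form, memory as fourth source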
+
+VFMADDPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 68 /r /is4] AMD,SSE5
+VFMADDPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 68 /r /is4] AMD,SSE5
+VFMADDPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 68 /r /is4] AMD,SSE5
+VFMADDPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 68 /r /is4] AMD,SSE5
+
+VFMADDSD xmmreg,xmmreg*,xmmrm64,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6b /r /is4] AMD,SSE5
+VFMADDSD xmmreg,xmmreg*,xmmreg,xmmrm64 [rvsm: vex.m3.w1.nds.l0.p1 6b /r /is4] AMD,SSE5
+
+VFMADDSS xmmreg,xmmreg*,xmmrm32,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6a /r /is4] AMD,SSE5
+VFMADDSS xmmreg,xmmreg*,xmmreg,xmmrm32 [rvsm: vex.m3.w1.nds.l0.p1 6a /r /is4] AMD,SSE5
+
+VFMADDSUBPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 5d /r /is4] AMD,SSE5
+VFMADDSUBPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 5d /r /is4] AMD,SSE5
+VFMADDSUBPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 5d /r /is4] AMD,SSE5
+VFMADDSUBPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 5d /r /is4] AMD,SSE5
+
+VFMADDSUBPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 5c /r /is4] AMD,SSE5
+VFMADDSUBPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 5c /r /is4] AMD,SSE5
+VFMADDSUBPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 5c /r /is4] AMD,SSE5
+VFMADDSUBPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 5c /r /is4] AMD,SSE5
+
+VFMSUBADDPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 5f /r /is4] AMD,SSE5
+VFMSUBADDPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 5f /r /is4] AMD,SSE5
+VFMSUBADDPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 5f /r /is4] AMD,SSE5
+VFMSUBADDPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 5f /r /is4] AMD,SSE5
+
+VFMSUBADDPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 5e /r /is4] AMD,SSE5
+VFMSUBADDPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 5e /r /is4] AMD,SSE5
+VFMSUBADDPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 5e /r /is4] AMD,SSE5
+VFMSUBADDPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 5e /r /is4] AMD,SSE5
+
+VFMSUBPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6d /r /is4] AMD,SSE5
+VFMSUBPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 6d /r /is4] AMD,SSE5
+VFMSUBPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 6d /r /is4] AMD,SSE5
+VFMSUBPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 6d /r /is4] AMD,SSE5
+
+VFMSUBPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6c /r /is4] AMD,SSE5
+VFMSUBPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 6c /r /is4] AMD,SSE5
+VFMSUBPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 6c /r /is4] AMD,SSE5
+VFMSUBPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 6c /r /is4] AMD,SSE5
+
+VFMSUBSD xmmreg,xmmreg*,xmmrm64,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6f /r /is4] AMD,SSE5
+VFMSUBSD xmmreg,xmmreg*,xmmreg,xmmrm64 [rvsm: vex.m3.w1.nds.l0.p1 6f /r /is4] AMD,SSE5
+
+VFMSUBSS xmmreg,xmmreg*,xmmrm32,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6e /r /is4] AMD,SSE5
+VFMSUBSS xmmreg,xmmreg*,xmmreg,xmmrm32 [rvsm: vex.m3.w1.nds.l0.p1 6e /r /is4] AMD,SSE5
+
+VFNMADDPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 79 /r /is4] AMD,SSE5
+VFNMADDPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 79 /r /is4] AMD,SSE5
+VFNMADDPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 79 /r /is4] AMD,SSE5
+VFNMADDPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 79 /r /is4] AMD,SSE5
+
+VFNMADDPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 78 /r /is4] AMD,SSE5
+VFNMADDPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 78 /r /is4] AMD,SSE5
+VFNMADDPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 78 /r /is4] AMD,SSE5
+VFNMADDPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 78 /r /is4] AMD,SSE5
+
+VFNMADDSD xmmreg,xmmreg*,xmmrm64,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7b /r /is4] AMD,SSE5
+VFNMADDSD xmmreg,xmmreg*,xmmreg,xmmrm64 [rvsm: vex.m3.w1.nds.l0.p1 7b /r /is4] AMD,SSE5
+
+VFNMADDSS xmmreg,xmmreg*,xmmrm32,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7a /r /is4] AMD,SSE5
+VFNMADDSS xmmreg,xmmreg*,xmmreg,xmmrm32 [rvsm: vex.m3.w1.nds.l0.p1 7a /r /is4] AMD,SSE5
+
+VFNMSUBPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7d /r /is4] AMD,SSE5
+VFNMSUBPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 7d /r /is4] AMD,SSE5
+VFNMSUBPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 7d /r /is4] AMD,SSE5
+VFNMSUBPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 7d /r /is4] AMD,SSE5
+
+VFNMSUBPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7c /r /is4] AMD,SSE5
+VFNMSUBPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 7c /r /is4] AMD,SSE5
+VFNMSUBPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 7c /r /is4] AMD,SSE5
+VFNMSUBPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 7c /r /is4] AMD,SSE5
+
+VFNMSUBSD xmmreg,xmmreg*,xmmrm64,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7f /r /is4] AMD,SSE5
+VFNMSUBSD xmmreg,xmmreg*,xmmreg,xmmrm64 [rvsm: vex.m3.w1.nds.l0.p1 7f /r /is4] AMD,SSE5
+
+VFNMSUBSS xmmreg,xmmreg*,xmmrm32,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7e /r /is4] AMD,SSE5
+VFNMSUBSS xmmreg,xmmreg*,xmmreg,xmmrm32 [rvsm: vex.m3.w1.nds.l0.p1 7e /r /is4] AMD,SSE5
+
+VFRCZPD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 81 /r] AMD,SSE5
+VFRCZPD ymmreg,ymmrm256* [rm: xop.m9.w0.l1.p0 81 /r] AMD,SSE5
+
+VFRCZPS xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 80 /r] AMD,SSE5
+VFRCZPS ymmreg,ymmrm256* [rm: xop.m9.w0.l1.p0 80 /r] AMD,SSE5
+
+VFRCZSD xmmreg,xmmrm64* [rm: xop.m9.w0.l0.p0 83 /r] AMD,SSE5
+
+VFRCZSS xmmreg,xmmrm32* [rm: xop.m9.w0.l0.p0 82 /r] AMD,SSE5
+;
+; fixed: spec mentions imm[7:4], though it should be /is4 even in the spec
+VPCMOV xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 a2 /r /is4] AMD,SSE5
+VPCMOV ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: xop.m8.w0.nds.l1.p0 a2 /r /is4] AMD,SSE5
+VPCMOV xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: xop.m8.w1.nds.l0.p0 a2 /r /is4] AMD,SSE5
+VPCMOV ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: xop.m8.w1.nds.l1.p0 a2 /r /is4] AMD,SSE5
+
+VPCOMB xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 cc /r ib] AMD,SSE5
+VPCOMD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 ce /r ib] AMD,SSE5
+VPCOMQ xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 cf /r ib] AMD,SSE5
+;
+; fixed: spec mentions only 3 operands in the mnemonic
+VPCOMUB xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 ec /r ib] AMD,SSE5
+VPCOMUD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 ee /r ib] AMD,SSE5
+VPCOMUQ xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 ef /r ib] AMD,SSE5
+;
+; fixed: spec wrongly shows VPCOMB for this mnemonic
+VPCOMUW xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 ed /r ib] AMD,SSE5
+VPCOMW xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 cd /r ib] AMD,SSE5
+
+VPHADDBD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 c2 /r] AMD,SSE5
+VPHADDBQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 c3 /r] AMD,SSE5
+VPHADDBW xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 c1 /r] AMD,SSE5
+VPHADDDQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 cb /r] AMD,SSE5
+;
+; fixed: spec has ymmreg for l0
+VPHADDUBD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 d2 /r] AMD,SSE5
+VPHADDUBQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 d3 /r] AMD,SSE5
+;
+; fixed: spec has VPHADDUBWD
+VPHADDUBW xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 d1 /r] AMD,SSE5
+;
+; fixed: opcode db
+VPHADDUDQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 db /r] AMD,SSE5
+VPHADDUWD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 d6 /r] AMD,SSE5
+VPHADDUWQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 d7 /r] AMD,SSE5
+;
+; fixed: spec has ymmreg for l0
+VPHADDWD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 c6 /r] AMD,SSE5
+;
+; fixed: spec has d7 opcode
+VPHADDWQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 c7 /r] AMD,SSE5
+
+VPHSUBBW xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 e1 /r] AMD,SSE5
+VPHSUBDQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 e3 /r] AMD,SSE5
+VPHSUBWD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 e2 /r] AMD,SSE5
+
+VPMACSDD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 9e /r /is4] AMD,SSE5
+;
+; fixed: spec has 97,9f opcodes here
+VPMACSDQH xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 9f /r /is4] AMD,SSE5
+VPMACSDQL xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 97 /r /is4] AMD,SSE5
+VPMACSSDD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 8e /r /is4] AMD,SSE5
+VPMACSSDQH xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 8f /r /is4] AMD,SSE5
+VPMACSSDQL xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 87 /r /is4] AMD,SSE5
+VPMACSSWD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 86 /r /is4] AMD,SSE5
+VPMACSSWW xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 85 /r /is4] AMD,SSE5
+VPMACSWD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 96 /r /is4] AMD,SSE5
+VPMACSWW xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 95 /r /is4] AMD,SSE5
+VPMADCSSWD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 a6 /r /is4] AMD,SSE5
+VPMADCSWD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 b6 /r /is4] AMD,SSE5
+
+VPPERM xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: xop.m8.w1.nds.l0.p0 a3 /r /is4] AMD,SSE5
+VPPERM xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 a3 /r /is4] AMD,SSE5
+
+VPROTB xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 90 /r] AMD,SSE5
+VPROTB xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 90 /r] AMD,SSE5
+;
+; fixed: spec shows xmmreg instead of reg/mem
+VPROTB xmmreg,xmmrm128*,imm8 [rmi: xop.m8.w0.l0.p0 c0 /r ib] AMD,SSE5
+
+VPROTD xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 92 /r] AMD,SSE5
+VPROTD xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 92 /r] AMD,SSE5
+;
+; fixed: spec error; /r is needed
+VPROTD xmmreg,xmmrm128*,imm8 [rmi: xop.m8.w0.l0.p0 c2 /r ib] AMD,SSE5
+VPROTQ xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 93 /r] AMD,SSE5
+VPROTQ xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 93 /r] AMD,SSE5
+;
+; fixed: spec error; /r is needed
+VPROTQ xmmreg,xmmrm128*,imm8 [rmi: xop.m8.w0.l0.p0 c3 /r ib] AMD,SSE5
+VPROTW xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 91 /r] AMD,SSE5
+VPROTW xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 91 /r] AMD,SSE5
+VPROTW xmmreg,xmmrm128*,imm8 [rmi: xop.m8.w0.l0.p0 c1 /r ib] AMD,SSE5
+
+VPSHAB xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 98 /r] AMD,SSE5
+VPSHAB xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 98 /r] AMD,SSE5
+
+VPSHAD xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 9a /r] AMD,SSE5
+VPSHAD xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 9a /r] AMD,SSE5
+
+VPSHAQ xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 9b /r] AMD,SSE5
+VPSHAQ xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 9b /r] AMD,SSE5
+
+VPSHAW xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 99 /r] AMD,SSE5
+VPSHAW xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 99 /r] AMD,SSE5
+
+VPSHLB xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 94 /r] AMD,SSE5
+VPSHLB xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 94 /r] AMD,SSE5
+
+;
+; fixed: spec has ymmreg for l0
+VPSHLD xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 96 /r] AMD,SSE5
+VPSHLD xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 96 /r] AMD,SSE5
+
+VPSHLQ xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 97 /r] AMD,SSE5
+VPSHLQ xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 97 /r] AMD,SSE5
+
+VPSHLW xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 95 /r] AMD,SSE5
+VPSHLW xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 95 /r] AMD,SSE5
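+;
+; The VPSHAx/VPSHLx pairs follow the same W-bit convention as VPROTx above.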
+
+
+;# Systematic names for the hinting nop instructions
+; These should be last in the file
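+; Encoding pattern: HINT_NOPn is 0F (18h + n/8) with ModRM reg field n mod 8,
+; so e.g. `hint_nop10 eax' assembles to 0F 19 D0 in 32-bit mode.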
+HINT_NOP0 rm16 \320\2\x0F\x18\200 P6,UNDOC
+HINT_NOP0 rm32 \321\2\x0F\x18\200 P6,UNDOC
+HINT_NOP0 rm64 \324\2\x0F\x18\200 X64,UNDOC
+HINT_NOP1 rm16 \320\2\x0F\x18\201 P6,UNDOC
+HINT_NOP1 rm32 \321\2\x0F\x18\201 P6,UNDOC
+HINT_NOP1 rm64 \324\2\x0F\x18\201 X64,UNDOC
+HINT_NOP2 rm16 \320\2\x0F\x18\202 P6,UNDOC
+HINT_NOP2 rm32 \321\2\x0F\x18\202 P6,UNDOC
+HINT_NOP2 rm64 \324\2\x0F\x18\202 X64,UNDOC
+HINT_NOP3 rm16 \320\2\x0F\x18\203 P6,UNDOC
+HINT_NOP3 rm32 \321\2\x0F\x18\203 P6,UNDOC
+HINT_NOP3 rm64 \324\2\x0F\x18\203 X64,UNDOC
+HINT_NOP4 rm16 \320\2\x0F\x18\204 P6,UNDOC
+HINT_NOP4 rm32 \321\2\x0F\x18\204 P6,UNDOC
+HINT_NOP4 rm64 \324\2\x0F\x18\204 X64,UNDOC
+HINT_NOP5 rm16 \320\2\x0F\x18\205 P6,UNDOC
+HINT_NOP5 rm32 \321\2\x0F\x18\205 P6,UNDOC
+HINT_NOP5 rm64 \324\2\x0F\x18\205 X64,UNDOC
+HINT_NOP6 rm16 \320\2\x0F\x18\206 P6,UNDOC
+HINT_NOP6 rm32 \321\2\x0F\x18\206 P6,UNDOC
+HINT_NOP6 rm64 \324\2\x0F\x18\206 X64,UNDOC
+HINT_NOP7 rm16 \320\2\x0F\x18\207 P6,UNDOC
+HINT_NOP7 rm32 \321\2\x0F\x18\207 P6,UNDOC
+HINT_NOP7 rm64 \324\2\x0F\x18\207 X64,UNDOC
+HINT_NOP8 rm16 \320\2\x0F\x19\200 P6,UNDOC
+HINT_NOP8 rm32 \321\2\x0F\x19\200 P6,UNDOC
+HINT_NOP8 rm64 \324\2\x0F\x19\200 X64,UNDOC
+HINT_NOP9 rm16 \320\2\x0F\x19\201 P6,UNDOC
+HINT_NOP9 rm32 \321\2\x0F\x19\201 P6,UNDOC
+HINT_NOP9 rm64 \324\2\x0F\x19\201 X64,UNDOC
+HINT_NOP10 rm16 \320\2\x0F\x19\202 P6,UNDOC
+HINT_NOP10 rm32 \321\2\x0F\x19\202 P6,UNDOC
+HINT_NOP10 rm64 \324\2\x0F\x19\202 X64,UNDOC
+HINT_NOP11 rm16 \320\2\x0F\x19\203 P6,UNDOC
+HINT_NOP11 rm32 \321\2\x0F\x19\203 P6,UNDOC
+HINT_NOP11 rm64 \324\2\x0F\x19\203 X64,UNDOC
+HINT_NOP12 rm16 \320\2\x0F\x19\204 P6,UNDOC
+HINT_NOP12 rm32 \321\2\x0F\x19\204 P6,UNDOC
+HINT_NOP12 rm64 \324\2\x0F\x19\204 X64,UNDOC
+HINT_NOP13 rm16 \320\2\x0F\x19\205 P6,UNDOC
+HINT_NOP13 rm32 \321\2\x0F\x19\205 P6,UNDOC
+HINT_NOP13 rm64 \324\2\x0F\x19\205 X64,UNDOC
+HINT_NOP14 rm16 \320\2\x0F\x19\206 P6,UNDOC
+HINT_NOP14 rm32 \321\2\x0F\x19\206 P6,UNDOC
+HINT_NOP14 rm64 \324\2\x0F\x19\206 X64,UNDOC
+HINT_NOP15 rm16 \320\2\x0F\x19\207 P6,UNDOC
+HINT_NOP15 rm32 \321\2\x0F\x19\207 P6,UNDOC
+HINT_NOP15 rm64 \324\2\x0F\x19\207 X64,UNDOC
+HINT_NOP16 rm16 \320\2\x0F\x1A\200 P6,UNDOC
+HINT_NOP16 rm32 \321\2\x0F\x1A\200 P6,UNDOC
+HINT_NOP16 rm64 \324\2\x0F\x1A\200 X64,UNDOC
+HINT_NOP17 rm16 \320\2\x0F\x1A\201 P6,UNDOC
+HINT_NOP17 rm32 \321\2\x0F\x1A\201 P6,UNDOC
+HINT_NOP17 rm64 \324\2\x0F\x1A\201 X64,UNDOC
+HINT_NOP18 rm16 \320\2\x0F\x1A\202 P6,UNDOC
+HINT_NOP18 rm32 \321\2\x0F\x1A\202 P6,UNDOC
+HINT_NOP18 rm64 \324\2\x0F\x1A\202 X64,UNDOC
+HINT_NOP19 rm16 \320\2\x0F\x1A\203 P6,UNDOC
+HINT_NOP19 rm32 \321\2\x0F\x1A\203 P6,UNDOC
+HINT_NOP19 rm64 \324\2\x0F\x1A\203 X64,UNDOC
+HINT_NOP20 rm16 \320\2\x0F\x1A\204 P6,UNDOC
+HINT_NOP20 rm32 \321\2\x0F\x1A\204 P6,UNDOC
+HINT_NOP20 rm64 \324\2\x0F\x1A\204 X64,UNDOC
+HINT_NOP21 rm16 \320\2\x0F\x1A\205 P6,UNDOC
+HINT_NOP21 rm32 \321\2\x0F\x1A\205 P6,UNDOC
+HINT_NOP21 rm64 \324\2\x0F\x1A\205 X64,UNDOC
+HINT_NOP22 rm16 \320\2\x0F\x1A\206 P6,UNDOC
+HINT_NOP22 rm32 \321\2\x0F\x1A\206 P6,UNDOC
+HINT_NOP22 rm64 \324\2\x0F\x1A\206 X64,UNDOC
+HINT_NOP23 rm16 \320\2\x0F\x1A\207 P6,UNDOC
+HINT_NOP23 rm32 \321\2\x0F\x1A\207 P6,UNDOC
+HINT_NOP23 rm64 \324\2\x0F\x1A\207 X64,UNDOC
+HINT_NOP24 rm16 \320\2\x0F\x1B\200 P6,UNDOC
+HINT_NOP24 rm32 \321\2\x0F\x1B\200 P6,UNDOC
+HINT_NOP24 rm64 \324\2\x0F\x1B\200 X64,UNDOC
+HINT_NOP25 rm16 \320\2\x0F\x1B\201 P6,UNDOC
+HINT_NOP25 rm32 \321\2\x0F\x1B\201 P6,UNDOC
+HINT_NOP25 rm64 \324\2\x0F\x1B\201 X64,UNDOC
+HINT_NOP26 rm16 \320\2\x0F\x1B\202 P6,UNDOC
+HINT_NOP26 rm32 \321\2\x0F\x1B\202 P6,UNDOC
+HINT_NOP26 rm64 \324\2\x0F\x1B\202 X64,UNDOC
+HINT_NOP27 rm16 \320\2\x0F\x1B\203 P6,UNDOC
+HINT_NOP27 rm32 \321\2\x0F\x1B\203 P6,UNDOC
+HINT_NOP27 rm64 \324\2\x0F\x1B\203 X64,UNDOC
+HINT_NOP28 rm16 \320\2\x0F\x1B\204 P6,UNDOC
+HINT_NOP28 rm32 \321\2\x0F\x1B\204 P6,UNDOC
+HINT_NOP28 rm64 \324\2\x0F\x1B\204 X64,UNDOC
+HINT_NOP29 rm16 \320\2\x0F\x1B\205 P6,UNDOC
+HINT_NOP29 rm32 \321\2\x0F\x1B\205 P6,UNDOC
+HINT_NOP29 rm64 \324\2\x0F\x1B\205 X64,UNDOC
+HINT_NOP30 rm16 \320\2\x0F\x1B\206 P6,UNDOC
+HINT_NOP30 rm32 \321\2\x0F\x1B\206 P6,UNDOC
+HINT_NOP30 rm64 \324\2\x0F\x1B\206 X64,UNDOC
+HINT_NOP31 rm16 \320\2\x0F\x1B\207 P6,UNDOC
+HINT_NOP31 rm32 \321\2\x0F\x1B\207 P6,UNDOC
+HINT_NOP31 rm64 \324\2\x0F\x1B\207 X64,UNDOC
+HINT_NOP32 rm16 \320\2\x0F\x1C\200 P6,UNDOC
+HINT_NOP32 rm32 \321\2\x0F\x1C\200 P6,UNDOC
+HINT_NOP32 rm64 \324\2\x0F\x1C\200 X64,UNDOC
+HINT_NOP33 rm16 \320\2\x0F\x1C\201 P6,UNDOC
+HINT_NOP33 rm32 \321\2\x0F\x1C\201 P6,UNDOC
+HINT_NOP33 rm64 \324\2\x0F\x1C\201 X64,UNDOC
+HINT_NOP34 rm16 \320\2\x0F\x1C\202 P6,UNDOC
+HINT_NOP34 rm32 \321\2\x0F\x1C\202 P6,UNDOC
+HINT_NOP34 rm64 \324\2\x0F\x1C\202 X64,UNDOC
+HINT_NOP35 rm16 \320\2\x0F\x1C\203 P6,UNDOC
+HINT_NOP35 rm32 \321\2\x0F\x1C\203 P6,UNDOC
+HINT_NOP35 rm64 \324\2\x0F\x1C\203 X64,UNDOC
+HINT_NOP36 rm16 \320\2\x0F\x1C\204 P6,UNDOC
+HINT_NOP36 rm32 \321\2\x0F\x1C\204 P6,UNDOC
+HINT_NOP36 rm64 \324\2\x0F\x1C\204 X64,UNDOC
+HINT_NOP37 rm16 \320\2\x0F\x1C\205 P6,UNDOC
+HINT_NOP37 rm32 \321\2\x0F\x1C\205 P6,UNDOC
+HINT_NOP37 rm64 \324\2\x0F\x1C\205 X64,UNDOC
+HINT_NOP38 rm16 \320\2\x0F\x1C\206 P6,UNDOC
+HINT_NOP38 rm32 \321\2\x0F\x1C\206 P6,UNDOC
+HINT_NOP38 rm64 \324\2\x0F\x1C\206 X64,UNDOC
+HINT_NOP39 rm16 \320\2\x0F\x1C\207 P6,UNDOC
+HINT_NOP39 rm32 \321\2\x0F\x1C\207 P6,UNDOC
+HINT_NOP39 rm64 \324\2\x0F\x1C\207 X64,UNDOC
+HINT_NOP40 rm16 \320\2\x0F\x1D\200 P6,UNDOC
+HINT_NOP40 rm32 \321\2\x0F\x1D\200 P6,UNDOC
+HINT_NOP40 rm64 \324\2\x0F\x1D\200 X64,UNDOC
+HINT_NOP41 rm16 \320\2\x0F\x1D\201 P6,UNDOC
+HINT_NOP41 rm32 \321\2\x0F\x1D\201 P6,UNDOC
+HINT_NOP41 rm64 \324\2\x0F\x1D\201 X64,UNDOC
+HINT_NOP42 rm16 \320\2\x0F\x1D\202 P6,UNDOC
+HINT_NOP42 rm32 \321\2\x0F\x1D\202 P6,UNDOC
+HINT_NOP42 rm64 \324\2\x0F\x1D\202 X64,UNDOC
+HINT_NOP43 rm16 \320\2\x0F\x1D\203 P6,UNDOC
+HINT_NOP43 rm32 \321\2\x0F\x1D\203 P6,UNDOC
+HINT_NOP43 rm64 \324\2\x0F\x1D\203 X64,UNDOC
+HINT_NOP44 rm16 \320\2\x0F\x1D\204 P6,UNDOC
+HINT_NOP44 rm32 \321\2\x0F\x1D\204 P6,UNDOC
+HINT_NOP44 rm64 \324\2\x0F\x1D\204 X64,UNDOC
+HINT_NOP45 rm16 \320\2\x0F\x1D\205 P6,UNDOC
+HINT_NOP45 rm32 \321\2\x0F\x1D\205 P6,UNDOC
+HINT_NOP45 rm64 \324\2\x0F\x1D\205 X64,UNDOC
+HINT_NOP46 rm16 \320\2\x0F\x1D\206 P6,UNDOC
+HINT_NOP46 rm32 \321\2\x0F\x1D\206 P6,UNDOC
+HINT_NOP46 rm64 \324\2\x0F\x1D\206 X64,UNDOC
+HINT_NOP47 rm16 \320\2\x0F\x1D\207 P6,UNDOC
+HINT_NOP47 rm32 \321\2\x0F\x1D\207 P6,UNDOC
+HINT_NOP47 rm64 \324\2\x0F\x1D\207 X64,UNDOC
+HINT_NOP48 rm16 \320\2\x0F\x1E\200 P6,UNDOC
+HINT_NOP48 rm32 \321\2\x0F\x1E\200 P6,UNDOC
+HINT_NOP48 rm64 \324\2\x0F\x1E\200 X64,UNDOC
+HINT_NOP49 rm16 \320\2\x0F\x1E\201 P6,UNDOC
+HINT_NOP49 rm32 \321\2\x0F\x1E\201 P6,UNDOC
+HINT_NOP49 rm64 \324\2\x0F\x1E\201 X64,UNDOC
+HINT_NOP50 rm16 \320\2\x0F\x1E\202 P6,UNDOC
+HINT_NOP50 rm32 \321\2\x0F\x1E\202 P6,UNDOC
+HINT_NOP50 rm64 \324\2\x0F\x1E\202 X64,UNDOC
+HINT_NOP51 rm16 \320\2\x0F\x1E\203 P6,UNDOC
+HINT_NOP51 rm32 \321\2\x0F\x1E\203 P6,UNDOC
+HINT_NOP51 rm64 \324\2\x0F\x1E\203 X64,UNDOC
+HINT_NOP52 rm16 \320\2\x0F\x1E\204 P6,UNDOC
+HINT_NOP52 rm32 \321\2\x0F\x1E\204 P6,UNDOC
+HINT_NOP52 rm64 \324\2\x0F\x1E\204 X64,UNDOC
+HINT_NOP53 rm16 \320\2\x0F\x1E\205 P6,UNDOC
+HINT_NOP53 rm32 \321\2\x0F\x1E\205 P6,UNDOC
+HINT_NOP53 rm64 \324\2\x0F\x1E\205 X64,UNDOC
+HINT_NOP54 rm16 \320\2\x0F\x1E\206 P6,UNDOC
+HINT_NOP54 rm32 \321\2\x0F\x1E\206 P6,UNDOC
+HINT_NOP54 rm64 \324\2\x0F\x1E\206 X64,UNDOC
+HINT_NOP55 rm16 \320\2\x0F\x1E\207 P6,UNDOC
+HINT_NOP55 rm32 \321\2\x0F\x1E\207 P6,UNDOC
+HINT_NOP55 rm64 \324\2\x0F\x1E\207 X64,UNDOC
+HINT_NOP56 rm16 \320\2\x0F\x1F\200 P6,UNDOC
+HINT_NOP56 rm32 \321\2\x0F\x1F\200 P6,UNDOC
+HINT_NOP56 rm64 \324\2\x0F\x1F\200 X64,UNDOC
+HINT_NOP57 rm16 \320\2\x0F\x1F\201 P6,UNDOC
+HINT_NOP57 rm32 \321\2\x0F\x1F\201 P6,UNDOC
+HINT_NOP57 rm64 \324\2\x0F\x1F\201 X64,UNDOC
+HINT_NOP58 rm16 \320\2\x0F\x1F\202 P6,UNDOC
+HINT_NOP58 rm32 \321\2\x0F\x1F\202 P6,UNDOC
+HINT_NOP58 rm64 \324\2\x0F\x1F\202 X64,UNDOC
+HINT_NOP59 rm16 \320\2\x0F\x1F\203 P6,UNDOC
+HINT_NOP59 rm32 \321\2\x0F\x1F\203 P6,UNDOC
+HINT_NOP59 rm64 \324\2\x0F\x1F\203 X64,UNDOC
+HINT_NOP60 rm16 \320\2\x0F\x1F\204 P6,UNDOC
+HINT_NOP60 rm32 \321\2\x0F\x1F\204 P6,UNDOC
+HINT_NOP60 rm64 \324\2\x0F\x1F\204 X64,UNDOC
+HINT_NOP61 rm16 \320\2\x0F\x1F\205 P6,UNDOC
+HINT_NOP61 rm32 \321\2\x0F\x1F\205 P6,UNDOC
+HINT_NOP61 rm64 \324\2\x0F\x1F\205 X64,UNDOC
+HINT_NOP62 rm16 \320\2\x0F\x1F\206 P6,UNDOC
+HINT_NOP62 rm32 \321\2\x0F\x1F\206 P6,UNDOC
+HINT_NOP62 rm64 \324\2\x0F\x1F\206 X64,UNDOC
+HINT_NOP63 rm16 \320\2\x0F\x1F\207 P6,UNDOC
+HINT_NOP63 rm32 \321\2\x0F\x1F\207 P6,UNDOC
+HINT_NOP63 rm64 \324\2\x0F\x1F\207 X64,UNDOC