author     Angelos Mouzakitis <a.mouzakitis@virtualopensystems.com>    2023-10-10 14:33:42 +0000
committer  Angelos Mouzakitis <a.mouzakitis@virtualopensystems.com>    2023-10-10 14:33:42 +0000
commit     af1a266670d040d2f4083ff309d732d648afba2a (patch)
tree       2fc46203448ddcc6f81546d379abfaeb323575e9 /capstone/suite/MC/X86
parent     e02cda008591317b1625707ff8e115a4841aa889 (diff)
Change-Id: Iaf8d18082d3991dec7c0ebbea540f092188eb4ec

Diffstat (limited to 'capstone/suite/MC/X86'):
19 files changed, 2781 insertions, 0 deletions
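Each .s.cs file added below is test data for Capstone's MC regression suite: every line pairs a comma-separated byte encoding with the disassembly text Capstone is expected to produce for it, and the leading '#' comment names the architecture, mode, and syntax option that apply to the whole file. The following is a minimal sketch of how one such line can be replayed with the Capstone Python binding; parse_line() and the sample vector are illustrative only, not the suite's actual harness.

# Minimal sketch, assuming the capstone Python binding is installed
# (pip install capstone). The real test driver lives elsewhere in suite/.
from capstone import Cs, CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT

def parse_line(line):
    # "0x0f,0x0f,0xca,0xbf = pavgusb %mm2, %mm1"
    #   -> (b"\x0f\x0f\xca\xbf", "pavgusb %mm2, %mm1")
    hexpart, asm = line.split(" = ", 1)
    code = bytes(int(b, 16) for b in hexpart.split(","))
    return code, asm.strip()

md = Cs(CS_ARCH_X86, CS_MODE_32)   # arch/mode from the file's header comment
md.syntax = CS_OPT_SYNTAX_ATT      # third header field, when one is given

code, expected = parse_line("0x0f,0x0f,0xca,0xbf = pavgusb %mm2, %mm1")
insn = next(md.disasm(code, 0x1000))
print(insn.mnemonic, insn.op_str)  # should read "pavgusb %mm2, %mm1"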
diff --git a/capstone/suite/MC/X86/3DNow.s.cs b/capstone/suite/MC/X86/3DNow.s.cs
new file mode 100644
index 000000000..495577a24
--- /dev/null
+++ b/capstone/suite/MC/X86/3DNow.s.cs
@@ -0,0 +1,29 @@
+# CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT
+0x0f,0x0f,0xca,0xbf = pavgusb %mm2, %mm1
+0x0f,0x0f,0x5c,0x16,0x09,0xbf = pavgusb 9(%esi, %edx), %mm3
+0x0f,0x0f,0xca,0x1d = pf2id %mm2, %mm1
+0x0f,0x0f,0x5c,0x16,0x09,0x1d = pf2id 9(%esi, %edx), %mm3
+0x0f,0x0f,0xca,0xae = pfacc %mm2, %mm1
+0x0f,0x0f,0xca,0x9e = pfadd %mm2, %mm1
+0x0f,0x0f,0xca,0xb0 = pfcmpeq %mm2, %mm1
+0x0f,0x0f,0xca,0x90 = pfcmpge %mm2, %mm1
+0x0f,0x0f,0xca,0xa0 = pfcmpgt %mm2, %mm1
+0x0f,0x0f,0xca,0xa4 = pfmax %mm2, %mm1
+0x0f,0x0f,0xca,0x94 = pfmin %mm2, %mm1
+0x0f,0x0f,0xca,0xb4 = pfmul %mm2, %mm1
+0x0f,0x0f,0xca,0x96 = pfrcp %mm2, %mm1
+0x0f,0x0f,0xca,0xa6 = pfrcpit1 %mm2, %mm1
+0x0f,0x0f,0xca,0xb6 = pfrcpit2 %mm2, %mm1
+0x0f,0x0f,0xca,0xa7 = pfrsqit1 %mm2, %mm1
+0x0f,0x0f,0xca,0x97 = pfrsqrt %mm2, %mm1
+0x0f,0x0f,0xca,0x9a = pfsub %mm2, %mm1
+0x0f,0x0f,0xca,0xaa = pfsubr %mm2, %mm1
+0x0f,0x0f,0xca,0x0d = pi2fd %mm2, %mm1
+0x0f,0x0f,0xca,0xb7 = pmulhrw %mm2, %mm1
+0x0f,0x0e = femms
+0x0f,0x0d,0x00 = prefetch (%eax)
+0x0f,0x0f,0xca,0x1c = pf2iw %mm2, %mm1
+0x0f,0x0f,0xca,0x0c = pi2fw %mm2, %mm1
+0x0f,0x0f,0xca,0x8a = pfnacc %mm2, %mm1
+0x0f,0x0f,0xca,0x8e = pfpnacc %mm2, %mm1
+0x0f,0x0f,0xca,0xbb = pswapd %mm2, %mm1
diff --git a/capstone/suite/MC/X86/address-size.s.cs b/capstone/suite/MC/X86/address-size.s.cs
new file mode 100644
index 000000000..209e72b86
--- /dev/null
+++ b/capstone/suite/MC/X86/address-size.s.cs
@@ -0,0 +1,5 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
+0x67,0xc6,0x06,0x00 = movb $0x0, (%esi)
+0xc6,0x06,0x00 = movb $0x0, (%rsi)
+0x67,0xc6,0x06,0x00 = movb $0x0, (%esi)
+0xc6,0x06,0x00 = movb $0x0, (%rsi)
diff --git a/capstone/suite/MC/X86/avx512-encodings.s.cs b/capstone/suite/MC/X86/avx512-encodings.s.cs
new file mode 100644
index 000000000..529431ae9
--- /dev/null
+++ b/capstone/suite/MC/X86/avx512-encodings.s.cs
@@ -0,0 +1,12 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
+0x62,0xa3,0x55,0x48,0x38,0xcd,0x01 = vinserti32x4 $1, %xmm21, %zmm5, %zmm17
+0x62,0xe3,0x1d,0x40,0x38,0x4f,0x10,0x01 = vinserti32x4 $1, 256(%rdi), %zmm28, %zmm17
+0x62,0x33,0x7d,0x48,0x39,0xc9,0x01 = vextracti32x4 $1, %zmm9, %xmm17
+0x62,0x33,0xfd,0x48,0x3b,0xc9,0x01 = vextracti64x4 $1, %zmm9, %ymm17
+0x62,0x73,0xfd,0x48,0x3b,0x4f,0x10,0x01 = vextracti64x4 $1, %zmm9, 512(%rdi)
+0x62,0xb1,0x35,0x40,0x72,0xe1,0x02 = vpsrad $2, %zmm17, %zmm25
+0x62,0xf1,0x35,0x40,0x72,0x64,0xb7,0x08,0x02 = vpsrad $2, 512(%rdi, %rsi, 4), %zmm25
+0x62,0x21,0x1d,0x48,0xe2,0xc9 = vpsrad %xmm17, %zmm12, %zmm25
+0x62,0x61,0x1d,0x48,0xe2,0x4c,0xb7,0x20 = vpsrad 512(%rdi, %rsi, 4), %zmm12, %zmm25
+0x62,0xf2,0x7d,0xc9,0x58,0xc8 = vpbroadcastd %xmm0, %zmm1 {%k1} {z}
+0x62,0xf1,0xfe,0x4b,0x6f,0xc8 = vmovdqu64 %zmm0, %zmm1 {%k3}
diff --git a/capstone/suite/MC/X86/intel-syntax-encoding.s.cs b/capstone/suite/MC/X86/intel-syntax-encoding.s.cs
new file mode 100644
index 000000000..452c2ed05
--- /dev/null
+++ b/capstone/suite/MC/X86/intel-syntax-encoding.s.cs
@@ -0,0 +1,30 @@
+# CS_ARCH_X86, CS_MODE_64, None
+0x66,0x83,0xf0,0x0c = xor ax, 12
+0x83,0xf0,0x0c = xor eax, 12
+0x48,0x83,0xf0,0x0c = xor rax, 12
+0x66,0x83,0xc8,0x0c = or ax, 12
+0x83,0xc8,0x0c = or eax, 12
+0x48,0x83,0xc8,0x0c = or rax, 12
+0x66,0x83,0xf8,0x0c = cmp ax, 12
+0x83,0xf8,0x0c = cmp eax, 12
+0x48,0x83,0xf8,0x0c = cmp rax, 12
+0x48,0x89,0x44,0x24,0xf0 = mov QWORD PTR [RSP - 16], RAX
+0x66,0x83,0xc0,0xf4 = add ax, -12
+0x83,0xc0,0xf4 = add eax, -12
+0x48,0x83,0xc0,0xf4 = add rax, -12
+0x66,0x83,0xd0,0xf4 = adc ax, -12
+0x83,0xd0,0xf4 = adc eax, -12
+0x48,0x83,0xd0,0xf4 = adc rax, -12
+0x66,0x83,0xd8,0xf4 = sbb ax, -12
+0x83,0xd8,0xf4 = sbb eax, -12
+0x48,0x83,0xd8,0xf4 = sbb rax, -12
+0x66,0x83,0xf8,0xf4 = cmp ax, -12
+0x83,0xf8,0xf4 = cmp eax, -12
+0x48,0x83,0xf8,0xf4 = cmp rax, -12
+0xf2,0x0f,0x10,0x2c,0x25,0xf8,0xff,0xff,0xff = movsd xmm5, qword ptr [0xfffffffffffffff8]
+0xd1,0xe7 = shl EDI, 1
+0x0f,0xc2,0xd1,0x01 = cmpltps XMM2, XMM1
+0xc3 = ret
+0xcb = retf
+0xc2,0x08,0x00 = ret 8
+0xca,0x08,0x00 = retf 8
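The third field of the header comment selects the printer syntax: CS_OPT_SYNTAX_ATT for the AT&T-style files, while "None" (as in intel-syntax-encoding.s.cs above) means no syntax option is applied, leaving Capstone's default Intel-style output, which matches the "xor ax, 12" form of the expected text. A hedged sketch, under the same Python-binding assumption as above:

# For a "None" file the syntax option is simply never set.
from capstone import Cs, CS_ARCH_X86, CS_MODE_64
md = Cs(CS_ARCH_X86, CS_MODE_64)      # no md.syntax assignment for "None"
insn = next(md.disasm(b"\x66\x83\xf0\x0c", 0))
print(insn.mnemonic, insn.op_str)     # prints e.g. "xor ax, 0xc"; note the
                                      # harness must tolerate numeric-format
                                      # differences such as 0xc vs 12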
diff --git a/capstone/suite/MC/X86/x86-32-avx.s.cs b/capstone/suite/MC/X86/x86-32-avx.s.cs
new file mode 100644
index 000000000..2239e1c72
--- /dev/null
+++ b/capstone/suite/MC/X86/x86-32-avx.s.cs
@@ -0,0 +1,826 @@
+# CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT
+0xc5,0xca,0x58,0xd4 = vaddss %xmm4, %xmm6, %xmm2
+0xc5,0xca,0x59,0xd4 = vmulss %xmm4, %xmm6, %xmm2
+0xc5,0xca,0x5c,0xd4 = vsubss %xmm4, %xmm6, %xmm2
+0xc5,0xca,0x5e,0xd4 = vdivss %xmm4, %xmm6, %xmm2
+0xc5,0xcb,0x58,0xd4 = vaddsd %xmm4, %xmm6, %xmm2
+0xc5,0xcb,0x59,0xd4 = vmulsd %xmm4, %xmm6, %xmm2
+0xc5,0xcb,0x5c,0xd4 = vsubsd %xmm4, %xmm6, %xmm2
+0xc5,0xcb,0x5e,0xd4 = vdivsd %xmm4, %xmm6, %xmm2
+0xc5,0xea,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddss 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xea,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubss 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xea,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulss 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xea,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivss 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xeb,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddsd 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xeb,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubsd 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xeb,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulsd 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xeb,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivsd 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xc8,0x58,0xd4 = vaddps %xmm4, %xmm6, %xmm2
+0xc5,0xc8,0x5c,0xd4 = vsubps %xmm4, %xmm6, %xmm2
+0xc5,0xc8,0x59,0xd4 = vmulps %xmm4, %xmm6, %xmm2
+0xc5,0xc8,0x5e,0xd4 = vdivps %xmm4, %xmm6, %xmm2
+0xc5,0xc9,0x58,0xd4 = vaddpd %xmm4, %xmm6, %xmm2
+0xc5,0xc9,0x5c,0xd4 = vsubpd %xmm4, %xmm6, %xmm2
+0xc5,0xc9,0x59,0xd4 = vmulpd %xmm4, %xmm6, %xmm2
+0xc5,0xc9,0x5e,0xd4 = vdivpd %xmm4, %xmm6, %xmm2
+0xc5,0xe8,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddps 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe8,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubps 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe8,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulps 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe8,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivps 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x58,0xac,0xcb,0xef,0xbe,0xad,0xde = vaddpd 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x5c,0xac,0xcb,0xef,0xbe,0xad,0xde = vsubpd 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x59,0xac,0xcb,0xef,0xbe,0xad,0xde = vmulpd 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x5e,0xac,0xcb,0xef,0xbe,0xad,0xde = vdivpd 3735928559(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xda,0x5f,0xf2 = vmaxss %xmm2, %xmm4, %xmm6
+0xc5,0xdb,0x5f,0xf2 = vmaxsd %xmm2, %xmm4, %xmm6
+0xc5,0xda,0x5d,0xf2 = vminss %xmm2, %xmm4, %xmm6
+0xc5,0xdb,0x5d,0xf2 = vminsd %xmm2, %xmm4, %xmm6
+0xc5,0xea,0x5f,0x6c,0xcb,0xfc = vmaxss -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xeb,0x5f,0x6c,0xcb,0xfc = vmaxsd -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xea,0x5d,0x6c,0xcb,0xfc = vminss -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xeb,0x5d,0x6c,0xcb,0xfc = vminsd -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xd8,0x5f,0xf2 = vmaxps %xmm2, %xmm4, %xmm6
+0xc5,0xd9,0x5f,0xf2 = vmaxpd %xmm2, %xmm4, %xmm6
+0xc5,0xd8,0x5d,0xf2 = vminps %xmm2, %xmm4, %xmm6
+0xc5,0xd9,0x5d,0xf2 = vminpd %xmm2, %xmm4, %xmm6
+0xc5,0xe8,0x5f,0x6c,0xcb,0xfc = vmaxps -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x5f,0x6c,0xcb,0xfc = vmaxpd -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe8,0x5d,0x6c,0xcb,0xfc = vminps -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x5d,0x6c,0xcb,0xfc = vminpd -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xd8,0x54,0xf2 = vandps %xmm2, %xmm4, %xmm6
+0xc5,0xd9,0x54,0xf2 = vandpd %xmm2, %xmm4, %xmm6
+0xc5,0xe8,0x54,0x6c,0xcb,0xfc = vandps -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x54,0x6c,0xcb,0xfc = vandpd -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xd8,0x56,0xf2 = vorps %xmm2, %xmm4, %xmm6
+0xc5,0xd9,0x56,0xf2 = vorpd %xmm2, %xmm4, %xmm6
+0xc5,0xe8,0x56,0x6c,0xcb,0xfc = vorps -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x56,0x6c,0xcb,0xfc = vorpd -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xd8,0x57,0xf2 = vxorps %xmm2, %xmm4, %xmm6
+0xc5,0xd9,0x57,0xf2 = vxorpd %xmm2, %xmm4, %xmm6
+0xc5,0xe8,0x57,0x6c,0xcb,0xfc = vxorps -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x57,0x6c,0xcb,0xfc = vxorpd -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xd8,0x55,0xf2 = vandnps %xmm2, %xmm4, %xmm6
+0xc5,0xd9,0x55,0xf2 = vandnpd %xmm2, %xmm4, %xmm6
+0xc5,0xe8,0x55,0x6c,0xcb,0xfc = vandnps -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x55,0x6c,0xcb,0xfc = vandnpd -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xfa,0x10,0x6c,0xcb,0xfc = vmovss -4(%ebx, %ecx, 8), %xmm5
+0xc5,0xea,0x10,0xec = vmovss %xmm4, %xmm2, %xmm5
+0xc5,0xfb,0x10,0x6c,0xcb,0xfc = vmovsd -4(%ebx, %ecx, 8), %xmm5
+0xc5,0xeb,0x10,0xec = vmovsd %xmm4, %xmm2, %xmm5
+0xc5,0xe8,0x15,0xe1 = vunpckhps %xmm1, %xmm2, %xmm4
+0xc5,0xe9,0x15,0xe1 = vunpckhpd %xmm1, %xmm2, %xmm4
+0xc5,0xe8,0x14,0xe1 = vunpcklps %xmm1, %xmm2, %xmm4
+0xc5,0xe9,0x14,0xe1 = vunpcklpd %xmm1, %xmm2, %xmm4
+0xc5,0xe8,0x15,0x6c,0xcb,0xfc = vunpckhps -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x15,0x6c,0xcb,0xfc = vunpckhpd -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe8,0x14,0x6c,0xcb,0xfc = vunpcklps -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xe9,0x14,0x6c,0xcb,0xfc = vunpcklpd -4(%ebx, %ecx, 8), %xmm2, %xmm5
+0xc5,0xc8,0xc2,0xc8,0x00 = vcmpeqps %xmm0, %xmm6, %xmm1
+0xc5,0xc8,0xc2,0x08,0x00 = vcmpeqps (%eax), %xmm6, %xmm1
+0xc5,0xc8,0xc2,0xc8,0x07 = vcmpordps %xmm0, %xmm6, %xmm1
+0xc5,0xc9,0xc2,0xc8,0x00 = vcmpeqpd %xmm0, %xmm6, %xmm1
+0xc5,0xc9,0xc2,0x08,0x00 = vcmpeqpd (%eax), %xmm6, %xmm1
+0xc5,0xc9,0xc2,0xc8,0x07 = vcmpordpd %xmm0, %xmm6, %xmm1
+0xc5,0xe8,0xc6,0xd9,0x08 = vshufps $8, %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc6,0x5c,0xcb,0xfc,0x08 = vshufps $8, -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe9,0xc6,0xd9,0x08 = vshufpd $8, %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xc6,0x5c,0xcb,0xfc,0x08 = vshufpd $8, -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x00 = vcmpeqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x02 = vcmpleps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x01 = vcmpltps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x04 = vcmpneqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x06 = vcmpnleps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x05 = vcmpnltps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x07 = vcmpordps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x03 = vcmpunordps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqps -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpleps -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltps -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqps -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnleps -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltps -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordps -4(%ebx, %ecx, 8), %xmm6, %xmm2
+0xc5,0xe8,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordps -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe9,0xc2,0xd9,0x00 = vcmpeqpd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xc2,0xd9,0x02 = vcmplepd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xc2,0xd9,0x01 = vcmpltpd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xc2,0xd9,0x04 = vcmpneqpd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xc2,0xd9,0x06 = vcmpnlepd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xc2,0xd9,0x05 = vcmpnltpd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xc2,0xd9,0x07 = vcmpordpd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xc2,0xd9,0x03 = vcmpunordpd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqpd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplepd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltpd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqpd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlepd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltpd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordpd -4(%ebx, %ecx, 8), %xmm6, %xmm2
+0xc5,0xe9,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordpd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xf8,0x50,0xc2 = vmovmskps %xmm2, %eax
+0xc5,0xf9,0x50,0xc2 = vmovmskpd %xmm2, %eax
+0xc5,0xfc,0x50,0xc2 = vmovmskps %ymm2, %eax
+0xc5,0xfd,0x50,0xc2 = vmovmskpd %ymm2, %eax
+0xc5,0xea,0xc2,0xd9,0x00 = vcmpeqss %xmm1, %xmm2, %xmm3
+0xc5,0xea,0xc2,0xd9,0x02 = vcmpless %xmm1, %xmm2, %xmm3
+0xc5,0xea,0xc2,0xd9,0x01 = vcmpltss %xmm1, %xmm2, %xmm3
+0xc5,0xea,0xc2,0xd9,0x04 = vcmpneqss %xmm1, %xmm2, %xmm3
+0xc5,0xea,0xc2,0xd9,0x06 = vcmpnless %xmm1, %xmm2, %xmm3
+0xc5,0xea,0xc2,0xd9,0x05 = vcmpnltss %xmm1, %xmm2, %xmm3
+0xc5,0xea,0xc2,0xd9,0x07 = vcmpordss %xmm1, %xmm2, %xmm3
+0xc5,0xea,0xc2,0xd9,0x03 = vcmpunordss %xmm1, %xmm2, %xmm3
+0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqss -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpless -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltss -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqss -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnless -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltss -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordss -4(%ebx, %ecx, 8), %xmm6, %xmm2
+0xc5,0xea,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordss -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xeb,0xc2,0xd9,0x00 = vcmpeqsd %xmm1, %xmm2, %xmm3
+0xc5,0xeb,0xc2,0xd9,0x02 = vcmplesd %xmm1, %xmm2, %xmm3
+0xc5,0xeb,0xc2,0xd9,0x01 = vcmpltsd %xmm1, %xmm2, %xmm3
+0xc5,0xeb,0xc2,0xd9,0x04 = vcmpneqsd %xmm1, %xmm2, %xmm3
+0xc5,0xeb,0xc2,0xd9,0x06 = vcmpnlesd %xmm1, %xmm2, %xmm3
+0xc5,0xeb,0xc2,0xd9,0x05 = vcmpnltsd %xmm1, %xmm2, %xmm3
+0xc5,0xeb,0xc2,0xd9,0x07 = vcmpordsd %xmm1, %xmm2, %xmm3
+0xc5,0xeb,0xc2,0xd9,0x03 = vcmpunordsd %xmm1, %xmm2, %xmm3
+0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqsd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplesd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltsd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqsd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlesd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltsd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordsd -4(%ebx, %ecx, 8), %xmm6, %xmm2
+0xc5,0xeb,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordsd -4(%ebx, %ecx, 8), %xmm2, %xmm3
+0xc5,0xf8,0x2e,0xd1 = vucomiss %xmm1, %xmm2
+0xc5,0xf8,0x2e,0x10 = vucomiss (%eax), %xmm2
+0xc5,0xf8,0x2f,0xd1 = vcomiss %xmm1, %xmm2
+0xc5,0xf8,0x2f,0x10 = vcomiss (%eax), %xmm2
+0xc5,0xf9,0x2e,0xd1 = vucomisd %xmm1, %xmm2
+0xc5,0xf9,0x2e,0x10 = vucomisd (%eax), %xmm2
+0xc5,0xf9,0x2f,0xd1 = vcomisd %xmm1, %xmm2
+0xc5,0xf9,0x2f,0x10 = vcomisd (%eax), %xmm2
+0xc5,0xfa,0x2c,0xc1 = vcvttss2si %xmm1, %eax
+0xc5,0xf2,0x2a,0x10 = vcvtsi2ssl (%eax), %xmm1, %xmm2
+0xc5,0xfb,0x2c,0xc1 = vcvttsd2si %xmm1, %eax
+0xc5,0xfb,0x2c,0x01 = vcvttsd2si (%ecx), %eax
+0xc5,0xf3,0x2a,0x10 = vcvtsi2sdl (%eax), %xmm1, %xmm2
+0xc5,0xf8,0x28,0x10 = vmovaps (%eax), %xmm2
+0xc5,0xf8,0x28,0xd1 = vmovaps %xmm1, %xmm2
+0xc5,0xf8,0x29,0x08 = vmovaps %xmm1, (%eax)
+0xc5,0xf9,0x28,0x10 = vmovapd (%eax), %xmm2
+0xc5,0xf9,0x28,0xd1 = vmovapd %xmm1, %xmm2
+0xc5,0xf9,0x29,0x08 = vmovapd %xmm1, (%eax)
+0xc5,0xf8,0x10,0x10 = vmovups (%eax), %xmm2
+0xc5,0xf8,0x10,0xd1 = vmovups %xmm1, %xmm2
+0xc5,0xf8,0x11,0x08 = vmovups %xmm1, (%eax)
+0xc5,0xf9,0x10,0x10 = vmovupd (%eax), %xmm2
+0xc5,0xf9,0x10,0xd1 = vmovupd %xmm1, %xmm2
+0xc5,0xf9,0x11,0x08 = vmovupd %xmm1, (%eax)
+0xc5,0xf8,0x13,0x08 = vmovlps %xmm1, (%eax)
+0xc5,0xe8,0x12,0x18 = vmovlps (%eax), %xmm2, %xmm3
+0xc5,0xf9,0x13,0x08 = vmovlpd %xmm1, (%eax)
+0xc5,0xe9,0x12,0x18 = vmovlpd (%eax), %xmm2, %xmm3
+0xc5,0xf8,0x17,0x08 = vmovhps %xmm1, (%eax)
+0xc5,0xe8,0x16,0x18 = vmovhps (%eax), %xmm2, %xmm3
+0xc5,0xf9,0x17,0x08 = vmovhpd %xmm1, (%eax)
+0xc5,0xe9,0x16,0x18 = vmovhpd (%eax), %xmm2, %xmm3
+0xc5,0xe8,0x16,0xd9 = vmovlhps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0x12,0xd9 = vmovhlps %xmm1, %xmm2, %xmm3
+0xc5,0xfa,0x2d,0xc1 = vcvtss2si %xmm1, %eax
+0xc5,0xfa,0x2d,0x18 = vcvtss2si (%eax), %ebx
+0xc5,0xfa,0x2d,0xc1 = vcvtss2si %xmm1, %eax
+0xc5,0xfa,0x2d,0x18 = vcvtss2si (%eax), %ebx
+0xc5,0xf8,0x5b,0xf5 = vcvtdq2ps %xmm5, %xmm6
+0xc5,0xf8,0x5b,0x30 = vcvtdq2ps (%eax), %xmm6
+0xc5,0xdb,0x5a,0xf2 = vcvtsd2ss %xmm2, %xmm4, %xmm6
+0xc5,0xdb,0x5a,0x30 = vcvtsd2ss (%eax), %xmm4, %xmm6
+0xc5,0xf9,0x5b,0xda = vcvtps2dq %xmm2, %xmm3
+0xc5,0xf9,0x5b,0x18 = vcvtps2dq (%eax), %xmm3
+0xc5,0xda,0x5a,0xf2 = vcvtss2sd %xmm2, %xmm4, %xmm6
+0xc5,0xda,0x5a,0x30 = vcvtss2sd (%eax), %xmm4, %xmm6
+0xc5,0xf8,0x5b,0xf4 = vcvtdq2ps %xmm4, %xmm6
+0xc5,0xf8,0x5b,0x21 = vcvtdq2ps (%ecx), %xmm4
+0xc5,0xfa,0x5b,0xda = vcvttps2dq %xmm2, %xmm3
+0xc5,0xfa,0x5b,0x18 = vcvttps2dq (%eax), %xmm3
+0xc5,0xf8,0x5a,0xda = vcvtps2pd %xmm2, %xmm3
+0xc5,0xf8,0x5a,0x18 = vcvtps2pd (%eax), %xmm3
+0xc5,0xf9,0x5a,0xda = vcvtpd2ps %xmm2, %xmm3
+0xc5,0xf9,0x51,0xd1 = vsqrtpd %xmm1, %xmm2
+0xc5,0xf9,0x51,0x10 = vsqrtpd (%eax), %xmm2
+0xc5,0xf8,0x51,0xd1 = vsqrtps %xmm1, %xmm2
+0xc5,0xf8,0x51,0x10 = vsqrtps (%eax), %xmm2
+0xc5,0xeb,0x51,0xd9 = vsqrtsd %xmm1, %xmm2, %xmm3
+0xc5,0xeb,0x51,0x18 = vsqrtsd (%eax), %xmm2, %xmm3
+0xc5,0xea,0x51,0xd9 = vsqrtss %xmm1, %xmm2, %xmm3
+0xc5,0xea,0x51,0x18 = vsqrtss (%eax), %xmm2, %xmm3
+0xc5,0xf8,0x52,0xd1 = vrsqrtps %xmm1, %xmm2
+0xc5,0xf8,0x52,0x10 = vrsqrtps (%eax), %xmm2
+0xc5,0xea,0x52,0xd9 = vrsqrtss %xmm1, %xmm2, %xmm3
+0xc5,0xea,0x52,0x18 = vrsqrtss (%eax), %xmm2, %xmm3
+0xc5,0xf8,0x53,0xd1 = vrcpps %xmm1, %xmm2
+0xc5,0xf8,0x53,0x10 = vrcpps (%eax), %xmm2
+0xc5,0xea,0x53,0xd9 = vrcpss %xmm1, %xmm2, %xmm3
+0xc5,0xea,0x53,0x18 = vrcpss (%eax), %xmm2, %xmm3
+0xc5,0xf9,0xe7,0x08 = vmovntdq %xmm1, (%eax)
+0xc5,0xf9,0x2b,0x08 = vmovntpd %xmm1, (%eax)
+0xc5,0xf8,0x2b,0x08 = vmovntps %xmm1, (%eax)
+0xc5,0xf8,0xae,0x10 = vldmxcsr (%eax)
+0xc5,0xf8,0xae,0x18 = vstmxcsr (%eax)
+0xc5,0xf8,0xae,0x15,0xef,0xbe,0xad,0xde = vldmxcsr 0xdeadbeef
+0xc5,0xf8,0xae,0x1d,0xef,0xbe,0xad,0xde = vstmxcsr 0xdeadbeef
+0xc5,0xe9,0xf8,0xd9 = vpsubb %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xf8,0x18 = vpsubb (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xf9,0xd9 = vpsubw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xf9,0x18 = vpsubw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xfa,0xd9 = vpsubd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xfa,0x18 = vpsubd (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xfb,0xd9 = vpsubq %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xfb,0x18 = vpsubq (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xe8,0xd9 = vpsubsb %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xe8,0x18 = vpsubsb (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xe9,0xd9 = vpsubsw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xe9,0x18 = vpsubsw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xd8,0xd9 = vpsubusb %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xd8,0x18 = vpsubusb (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xd9,0xd9 = vpsubusw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xd9,0x18 = vpsubusw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xfc,0xd9 = vpaddb %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xfc,0x18 = vpaddb (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xfd,0xd9 = vpaddw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xfd,0x18 = vpaddw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xfe,0xd9 = vpaddd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xfe,0x18 = vpaddd (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xd4,0xd9 = vpaddq %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xd4,0x18 = vpaddq (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xec,0xd9 = vpaddsb %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xec,0x18 = vpaddsb (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xed,0xd9 = vpaddsw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xed,0x18 = vpaddsw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xdc,0xd9 = vpaddusb %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xdc,0x18 = vpaddusb (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xdd,0xd9 = vpaddusw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xdd,0x18 = vpaddusw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xe4,0xd9 = vpmulhuw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xe4,0x18 = vpmulhuw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xe5,0xd9 = vpmulhw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xe5,0x18 = vpmulhw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xd5,0xd9 = vpmullw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xd5,0x18 = vpmullw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xf4,0xd9 = vpmuludq %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xf4,0x18 = vpmuludq (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xe0,0xd9 = vpavgb %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xe0,0x18 = vpavgb (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xe3,0xd9 = vpavgw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xe3,0x18 = vpavgw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xea,0xd9 = vpminsw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xea,0x18 = vpminsw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xda,0xd9 = vpminub %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xda,0x18 = vpminub (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xee,0xd9 = vpmaxsw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xee,0x18 = vpmaxsw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xde,0xd9 = vpmaxub %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xde,0x18 = vpmaxub (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xf6,0xd9 = vpsadbw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xf6,0x18 = vpsadbw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xf1,0xd9 = vpsllw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xf1,0x18 = vpsllw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xf2,0xd9 = vpslld %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xf2,0x18 = vpslld (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xf3,0xd9 = vpsllq %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xf3,0x18 = vpsllq (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xe1,0xd9 = vpsraw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xe1,0x18 = vpsraw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xe2,0xd9 = vpsrad %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xe2,0x18 = vpsrad (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xd1,0xd9 = vpsrlw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xd1,0x18 = vpsrlw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xd2,0xd9 = vpsrld %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xd2,0x18 = vpsrld (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xd3,0xd9 = vpsrlq %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xd3,0x18 = vpsrlq (%eax), %xmm2, %xmm3
+0xc5,0xe1,0x72,0xf2,0x0a = vpslld $10, %xmm2, %xmm3
+0xc5,0xe1,0x73,0xfa,0x0a = vpslldq $10, %xmm2, %xmm3
+0xc5,0xe1,0x73,0xf2,0x0a = vpsllq $10, %xmm2, %xmm3
+0xc5,0xe1,0x71,0xf2,0x0a = vpsllw $10, %xmm2, %xmm3
+0xc5,0xe1,0x72,0xe2,0x0a = vpsrad $10, %xmm2, %xmm3
+0xc5,0xe1,0x71,0xe2,0x0a = vpsraw $10, %xmm2, %xmm3
+0xc5,0xe1,0x72,0xd2,0x0a = vpsrld $10, %xmm2, %xmm3
+0xc5,0xe1,0x73,0xda,0x0a = vpsrldq $10, %xmm2, %xmm3
+0xc5,0xe1,0x73,0xd2,0x0a = vpsrlq $10, %xmm2, %xmm3
+0xc5,0xe1,0x71,0xd2,0x0a = vpsrlw $10, %xmm2, %xmm3
+0xc5,0xe1,0x72,0xf2,0x0a = vpslld $10, %xmm2, %xmm3
+0xc5,0xe9,0xdb,0xd9 = vpand %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xdb,0x18 = vpand (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xeb,0xd9 = vpor %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xeb,0x18 = vpor (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xef,0xd9 = vpxor %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xef,0x18 = vpxor (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xdf,0xd9 = vpandn %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0xdf,0x18 = vpandn (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x74,0xd9 = vpcmpeqb %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x74,0x18 = vpcmpeqb (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x75,0xd9 = vpcmpeqw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x75,0x18 = vpcmpeqw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x76,0xd9 = vpcmpeqd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x76,0x18 = vpcmpeqd (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x64,0xd9 = vpcmpgtb %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x64,0x18 = vpcmpgtb (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x65,0xd9 = vpcmpgtw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x65,0x18 = vpcmpgtw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x66,0xd9 = vpcmpgtd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x66,0x18 = vpcmpgtd (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x63,0xd9 = vpacksswb %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x63,0x18 = vpacksswb (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x6b,0xd9 = vpackssdw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x6b,0x18 = vpackssdw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x67,0xd9 = vpackuswb %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x67,0x18 = vpackuswb (%eax), %xmm2, %xmm3
+0xc5,0xf9,0x70,0xda,0x04 = vpshufd $4, %xmm2, %xmm3
+0xc5,0xf9,0x70,0x18,0x04 = vpshufd $4, (%eax), %xmm3
+0xc5,0xfa,0x70,0xda,0x04 = vpshufhw $4, %xmm2, %xmm3
+0xc5,0xfa,0x70,0x18,0x04 = vpshufhw $4, (%eax), %xmm3
+0xc5,0xfb,0x70,0xda,0x04 = vpshuflw $4, %xmm2, %xmm3
+0xc5,0xfb,0x70,0x18,0x04 = vpshuflw $4, (%eax), %xmm3
+0xc5,0xe9,0x60,0xd9 = vpunpcklbw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x60,0x18 = vpunpcklbw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x61,0xd9 = vpunpcklwd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x61,0x18 = vpunpcklwd (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x62,0xd9 = vpunpckldq %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x62,0x18 = vpunpckldq (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x6c,0xd9 = vpunpcklqdq %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x6c,0x18 = vpunpcklqdq (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x68,0xd9 = vpunpckhbw %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x68,0x18 = vpunpckhbw (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x69,0xd9 = vpunpckhwd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x69,0x18 = vpunpckhwd (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x6a,0xd9 = vpunpckhdq %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x6a,0x18 = vpunpckhdq (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x6d,0xd9 = vpunpckhqdq %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x6d,0x18 = vpunpckhqdq (%eax), %xmm2, %xmm3
+0xc5,0xe9,0xc4,0xd8,0x07 = vpinsrw $7, %eax, %xmm2, %xmm3
+0xc5,0xe9,0xc4,0x18,0x07 = vpinsrw $7, (%eax), %xmm2, %xmm3
+0xc5,0xf9,0xc5,0xc2,0x07 = vpextrw $7, %xmm2, %eax
+0xc5,0xf9,0xd7,0xc1 = vpmovmskb %xmm1, %eax
+0xc5,0xf9,0xf7,0xd1 = vmaskmovdqu %xmm1, %xmm2
+0xc5,0xf9,0x7e,0xc8 = vmovd %xmm1, %eax
+0xc5,0xf9,0x7e,0x08 = vmovd %xmm1, (%eax)
+0xc5,0xf9,0x6e,0xc8 = vmovd %eax, %xmm1
+0xc5,0xf9,0x6e,0x08 = vmovd (%eax), %xmm1
+0xc5,0xf9,0xd6,0x08 = vmovq %xmm1, (%eax)
+0xc5,0xfa,0x7e,0xd1 = vmovq %xmm1, %xmm2
+0xc5,0xfa,0x7e,0x08 = vmovq (%eax), %xmm1
+0xc5,0xfb,0xe6,0xd1 = vcvtpd2dq %xmm1, %xmm2
+0xc5,0xfa,0xe6,0xd1 = vcvtdq2pd %xmm1, %xmm2
+0xc5,0xfa,0xe6,0x10 = vcvtdq2pd (%eax), %xmm2
+0xc5,0xfa,0x16,0xd1 = vmovshdup %xmm1, %xmm2
+0xc5,0xfa,0x16,0x10 = vmovshdup (%eax), %xmm2
+0xc5,0xfa,0x12,0xd1 = vmovsldup %xmm1, %xmm2
+0xc5,0xfa,0x12,0x10 = vmovsldup (%eax), %xmm2
+0xc5,0xfb,0x12,0xd1 = vmovddup %xmm1, %xmm2
+0xc5,0xfb,0x12,0x10 = vmovddup (%eax), %xmm2
+0xc5,0xeb,0xd0,0xd9 = vaddsubps %xmm1, %xmm2, %xmm3
+0xc5,0xf3,0xd0,0x10 = vaddsubps (%eax), %xmm1, %xmm2
+0xc5,0xe9,0xd0,0xd9 = vaddsubpd %xmm1, %xmm2, %xmm3
+0xc5,0xf1,0xd0,0x10 = vaddsubpd (%eax), %xmm1, %xmm2
+0xc5,0xeb,0x7c,0xd9 = vhaddps %xmm1, %xmm2, %xmm3
+0xc5,0xeb,0x7c,0x18 = vhaddps (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x7c,0xd9 = vhaddpd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x7c,0x18 = vhaddpd (%eax), %xmm2, %xmm3
+0xc5,0xeb,0x7d,0xd9 = vhsubps %xmm1, %xmm2, %xmm3
+0xc5,0xeb,0x7d,0x18 = vhsubps (%eax), %xmm2, %xmm3
+0xc5,0xe9,0x7d,0xd9 = vhsubpd %xmm1, %xmm2, %xmm3
+0xc5,0xe9,0x7d,0x18 = vhsubpd (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x79,0x1c,0xd1 = vpabsb %xmm1, %xmm2
+0xc4,0xe2,0x79,0x1c,0x10 = vpabsb (%eax), %xmm2
+0xc4,0xe2,0x79,0x1d,0xd1 = vpabsw %xmm1, %xmm2
+0xc4,0xe2,0x79,0x1d,0x10 = vpabsw (%eax), %xmm2
+0xc4,0xe2,0x79,0x1e,0xd1 = vpabsd %xmm1, %xmm2
+0xc4,0xe2,0x79,0x1e,0x10 = vpabsd (%eax), %xmm2
+0xc4,0xe2,0x69,0x01,0xd9 = vphaddw %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x01,0x18 = vphaddw (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x69,0x02,0xd9 = vphaddd %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x02,0x18 = vphaddd (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x69,0x03,0xd9 = vphaddsw %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x03,0x18 = vphaddsw (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x69,0x05,0xd9 = vphsubw %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x05,0x18 = vphsubw (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x69,0x06,0xd9 = vphsubd %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x06,0x18 = vphsubd (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x69,0x07,0xd9 = vphsubsw %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x07,0x18 = vphsubsw (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x69,0x04,0xd9 = vpmaddubsw %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x04,0x18 = vpmaddubsw (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x69,0x00,0xd9 = vpshufb %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x00,0x18 = vpshufb (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x69,0x08,0xd9 = vpsignb %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x08,0x18 = vpsignb (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x69,0x09,0xd9 = vpsignw %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x09,0x18 = vpsignw (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x69,0x0a,0xd9 = vpsignd %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x0a,0x18 = vpsignd (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x69,0x0b,0xd9 = vpmulhrsw %xmm1, %xmm2, %xmm3
+0xc4,0xe2,0x69,0x0b,0x18 = vpmulhrsw (%eax), %xmm2, %xmm3
+0xc4,0xe3,0x69,0x0f,0xd9,0x07 = vpalignr $7, %xmm1, %xmm2, %xmm3
+0xc4,0xe3,0x69,0x0f,0x18,0x07 = vpalignr $7, (%eax), %xmm2, %xmm3
+0xc4,0xe3,0x69,0x0b,0xd9,0x07 = vroundsd $7, %xmm1, %xmm2, %xmm3
+0xc4,0xe3,0x69,0x0b,0x18,0x07 = vroundsd $7, (%eax), %xmm2, %xmm3
+0xc4,0xe3,0x69,0x0a,0xd9,0x07 = vroundss $7, %xmm1, %xmm2, %xmm3
+0xc4,0xe3,0x69,0x0a,0x18,0x07 = vroundss $7, (%eax), %xmm2, %xmm3
+0xc4,0xe3,0x79,0x09,0xda,0x07 = vroundpd $7, %xmm2, %xmm3
+0xc4,0xe3,0x79,0x09,0x18,0x07 = vroundpd $7, (%eax), %xmm3
+0xc4,0xe3,0x79,0x08,0xda,0x07 = vroundps $7, %xmm2, %xmm3
+0xc4,0xe3,0x79,0x08,0x18,0x07 = vroundps $7, (%eax), %xmm3
+0xc4,0xe2,0x79,0x41,0xda = vphminposuw %xmm2, %xmm3
+0xc4,0xe2,0x79,0x41,0x10 = vphminposuw (%eax), %xmm2
+0xc4,0xe2,0x61,0x2b,0xca = vpackusdw %xmm2, %xmm3, %xmm1
+0xc4,0xe2,0x69,0x2b,0x18 = vpackusdw (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x61,0x29,0xca = vpcmpeqq %xmm2, %xmm3, %xmm1
+0xc4,0xe2,0x69,0x29,0x18 = vpcmpeqq (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x61,0x38,0xca = vpminsb %xmm2, %xmm3, %xmm1
+0xc4,0xe2,0x69,0x38,0x18 = vpminsb (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x61,0x39,0xca = vpminsd %xmm2, %xmm3, %xmm1
+0xc4,0xe2,0x69,0x39,0x18 = vpminsd (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x61,0x3b,0xca = vpminud %xmm2, %xmm3, %xmm1
+0xc4,0xe2,0x69,0x3b,0x18 = vpminud (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x61,0x3a,0xca = vpminuw %xmm2, %xmm3, %xmm1
+0xc4,0xe2,0x69,0x3a,0x18 = vpminuw (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x61,0x3c,0xca = vpmaxsb %xmm2, %xmm3, %xmm1
+0xc4,0xe2,0x69,0x3c,0x18 = vpmaxsb (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x61,0x3d,0xca = vpmaxsd %xmm2, %xmm3, %xmm1
+0xc4,0xe2,0x69,0x3d,0x18 = vpmaxsd (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x61,0x3f,0xca = vpmaxud %xmm2, %xmm3, %xmm1
+0xc4,0xe2,0x69,0x3f,0x18 = vpmaxud (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x61,0x3e,0xca = vpmaxuw %xmm2, %xmm3, %xmm1
+0xc4,0xe2,0x69,0x3e,0x18 = vpmaxuw (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x61,0x28,0xca = vpmuldq %xmm2, %xmm3, %xmm1
+0xc4,0xe2,0x69,0x28,0x18 = vpmuldq (%eax), %xmm2, %xmm3
+0xc4,0xe2,0x51,0x40,0xca = vpmulld %xmm2, %xmm5, %xmm1
+0xc4,0xe2,0x51,0x40,0x18 = vpmulld (%eax), %xmm5, %xmm3
+0xc4,0xe3,0x51,0x0c,0xca,0x03 = vblendps $3, %xmm2, %xmm5, %xmm1
+0xc4,0xe3,0x51,0x0c,0x08,0x03 = vblendps $3, (%eax), %xmm5, %xmm1
+0xc4,0xe3,0x51,0x0d,0xca,0x03 = vblendpd $3, %xmm2, %xmm5, %xmm1
+0xc4,0xe3,0x51,0x0d,0x08,0x03 = vblendpd $3, (%eax), %xmm5, %xmm1
+0xc4,0xe3,0x51,0x0e,0xca,0x03 = vpblendw $3, %xmm2, %xmm5, %xmm1
+0xc4,0xe3,0x51,0x0e,0x08,0x03 = vpblendw $3, (%eax), %xmm5, %xmm1
+0xc4,0xe3,0x51,0x42,0xca,0x03 = vmpsadbw $3, %xmm2, %xmm5, %xmm1
+0xc4,0xe3,0x51,0x42,0x08,0x03 = vmpsadbw $3, (%eax), %xmm5, %xmm1
+0xc4,0xe3,0x51,0x40,0xca,0x03 = vdpps $3, %xmm2, %xmm5, %xmm1
+0xc4,0xe3,0x51,0x40,0x08,0x03 = vdpps $3, (%eax), %xmm5, %xmm1
+0xc4,0xe3,0x51,0x41,0xca,0x03 = vdppd $3, %xmm2, %xmm5, %xmm1
+0xc4,0xe3,0x51,0x41,0x08,0x03 = vdppd $3, (%eax), %xmm5, %xmm1
+0xc4,0xe3,0x71,0x4b,0xdd,0x20 = vblendvpd %xmm2, %xmm5, %xmm1, %xmm3
+0xc4,0xe3,0x71,0x4b,0x18,0x20 = vblendvpd %xmm2, (%eax), %xmm1, %xmm3
+0xc4,0xe3,0x71,0x4a,0xdd,0x20 = vblendvps %xmm2, %xmm5, %xmm1, %xmm3
+0xc4,0xe3,0x71,0x4a,0x18,0x20 = vblendvps %xmm2, (%eax), %xmm1, %xmm3
+0xc4,0xe3,0x71,0x4c,0xdd,0x20 = vpblendvb %xmm2, %xmm5, %xmm1, %xmm3
+0xc4,0xe3,0x71,0x4c,0x18,0x20 = vpblendvb %xmm2, (%eax), %xmm1, %xmm3
+0xc4,0xe2,0x79,0x20,0xea = vpmovsxbw %xmm2, %xmm5
+0xc4,0xe2,0x79,0x20,0x10 = vpmovsxbw (%eax), %xmm2
+0xc4,0xe2,0x79,0x23,0xea = vpmovsxwd %xmm2, %xmm5
+0xc4,0xe2,0x79,0x23,0x10 = vpmovsxwd (%eax), %xmm2
+0xc4,0xe2,0x79,0x25,0xea = vpmovsxdq %xmm2, %xmm5
+0xc4,0xe2,0x79,0x25,0x10 = vpmovsxdq (%eax), %xmm2
+0xc4,0xe2,0x79,0x30,0xea = vpmovzxbw %xmm2, %xmm5
+0xc4,0xe2,0x79,0x30,0x10 = vpmovzxbw (%eax), %xmm2
+0xc4,0xe2,0x79,0x33,0xea = vpmovzxwd %xmm2, %xmm5
+0xc4,0xe2,0x79,0x33,0x10 = vpmovzxwd (%eax), %xmm2
+0xc4,0xe2,0x79,0x35,0xea = vpmovzxdq %xmm2, %xmm5
+0xc4,0xe2,0x79,0x35,0x10 = vpmovzxdq (%eax), %xmm2
+0xc4,0xe2,0x79,0x22,0xea = vpmovsxbq %xmm2, %xmm5
+0xc4,0xe2,0x79,0x22,0x10 = vpmovsxbq (%eax), %xmm2
+0xc4,0xe2,0x79,0x32,0xea = vpmovzxbq %xmm2, %xmm5
+0xc4,0xe2,0x79,0x32,0x10 = vpmovzxbq (%eax), %xmm2
+0xc4,0xe2,0x79,0x21,0xea = vpmovsxbd %xmm2, %xmm5
+0xc4,0xe2,0x79,0x21,0x10 = vpmovsxbd (%eax), %xmm2
+0xc4,0xe2,0x79,0x24,0xea = vpmovsxwq %xmm2, %xmm5
+0xc4,0xe2,0x79,0x24,0x10 = vpmovsxwq (%eax), %xmm2
+0xc4,0xe2,0x79,0x31,0xea = vpmovzxbd %xmm2, %xmm5
+0xc4,0xe2,0x79,0x31,0x10 = vpmovzxbd (%eax), %xmm2
+0xc4,0xe2,0x79,0x34,0xea = vpmovzxwq %xmm2, %xmm5
+0xc4,0xe2,0x79,0x34,0x10 = vpmovzxwq (%eax), %xmm2
+0xc5,0xf9,0xc5,0xc2,0x07 = vpextrw $7, %xmm2, %eax
+0xc4,0xe3,0x79,0x15,0x10,0x07 = vpextrw $7, %xmm2, (%eax)
+0xc4,0xe3,0x79,0x16,0xd0,0x07 = vpextrd $7, %xmm2, %eax
+0xc4,0xe3,0x79,0x16,0x10,0x07 = vpextrd $7, %xmm2, (%eax)
+0xc4,0xe3,0x79,0x14,0xd0,0x07 = vpextrb $7, %xmm2, %eax
+0xc4,0xe3,0x79,0x14,0x10,0x07 = vpextrb $7, %xmm2, (%eax)
+0xc4,0xe3,0x79,0x17,0x10,0x07 = vextractps $7, %xmm2, (%eax)
+0xc4,0xe3,0x79,0x17,0xd0,0x07 = vextractps $7, %xmm2, %eax
+0xc5,0xe9,0xc4,0xe8,0x07 = vpinsrw $7, %eax, %xmm2, %xmm5
+0xc5,0xe9,0xc4,0x28,0x07 = vpinsrw $7, (%eax), %xmm2, %xmm5
+0xc4,0xe3,0x69,0x20,0xe8,0x07 = vpinsrb $7, %eax, %xmm2, %xmm5
+0xc4,0xe3,0x69,0x20,0x28,0x07 = vpinsrb $7, (%eax), %xmm2, %xmm5
+0xc4,0xe3,0x69,0x22,0xe8,0x07 = vpinsrd $7, %eax, %xmm2, %xmm5
+0xc4,0xe3,0x69,0x22,0x28,0x07 = vpinsrd $7, (%eax), %xmm2, %xmm5
+0xc4,0xe3,0x51,0x21,0xca,0x07 = vinsertps $7, %xmm2, %xmm5, %xmm1
+0xc4,0xe3,0x51,0x21,0x08,0x07 = vinsertps $7, (%eax), %xmm5, %xmm1
+0xc4,0xe2,0x79,0x17,0xea = vptest %xmm2, %xmm5
+0xc4,0xe2,0x79,0x17,0x10 = vptest (%eax), %xmm2
+0xc4,0xe2,0x79,0x2a,0x10 = vmovntdqa (%eax), %xmm2
+0xc4,0xe2,0x51,0x37,0xca = vpcmpgtq %xmm2, %xmm5, %xmm1
+0xc4,0xe2,0x51,0x37,0x18 = vpcmpgtq (%eax), %xmm5, %xmm3
+0xc4,0xe3,0x79,0x62,0xea,0x07 = vpcmpistrm $7, %xmm2, %xmm5
+0xc4,0xe3,0x79,0x62,0x28,0x07 = vpcmpistrm $7, (%eax), %xmm5
+0xc4,0xe3,0x79,0x60,0xea,0x07 = vpcmpestrm $7, %xmm2, %xmm5
+0xc4,0xe3,0x79,0x60,0x28,0x07 = vpcmpestrm $7, (%eax), %xmm5
+0xc4,0xe3,0x79,0x63,0xea,0x07 = vpcmpistri $7, %xmm2, %xmm5
+0xc4,0xe3,0x79,0x63,0x28,0x07 = vpcmpistri $7, (%eax), %xmm5
+0xc4,0xe3,0x79,0x61,0xea,0x07 = vpcmpestri $7, %xmm2, %xmm5
+0xc4,0xe3,0x79,0x61,0x28,0x07 = vpcmpestri $7, (%eax), %xmm5
+0xc4,0xe2,0x79,0xdb,0xea = vaesimc %xmm2, %xmm5
+0xc4,0xe2,0x79,0xdb,0x10 = vaesimc (%eax), %xmm2
+0xc4,0xe2,0x51,0xdc,0xca = vaesenc %xmm2, %xmm5, %xmm1
+0xc4,0xe2,0x51,0xdc,0x18 = vaesenc (%eax), %xmm5, %xmm3
+0xc4,0xe2,0x51,0xdd,0xca = vaesenclast %xmm2, %xmm5, %xmm1
+0xc4,0xe2,0x51,0xdd,0x18 = vaesenclast (%eax), %xmm5, %xmm3
+0xc4,0xe2,0x51,0xde,0xca = vaesdec %xmm2, %xmm5, %xmm1
+0xc4,0xe2,0x51,0xde,0x18 = vaesdec (%eax), %xmm5, %xmm3
+0xc4,0xe2,0x51,0xdf,0xca = vaesdeclast %xmm2, %xmm5, %xmm1
+0xc4,0xe2,0x51,0xdf,0x18 = vaesdeclast (%eax), %xmm5, %xmm3
+0xc4,0xe3,0x79,0xdf,0xea,0x07 = vaeskeygenassist $7, %xmm2, %xmm5
+0xc4,0xe3,0x79,0xdf,0x28,0x07 = vaeskeygenassist $7, (%eax), %xmm5
+0xc5,0xe8,0xc2,0xd9,0x08 = vcmpeq_uqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x09 = vcmpngeps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x0a = vcmpngtps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x0b = vcmpfalseps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x0c = vcmpneq_oqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x0d = vcmpgeps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x0e = vcmpgtps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x0f = vcmptrueps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x10 = vcmpeq_osps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x11 = vcmplt_oqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x12 = vcmple_oqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x13 = vcmpunord_sps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x14 = vcmpneq_usps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x15 = vcmpnlt_uqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x16 = vcmpnle_uqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x17 = vcmpord_sps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x18 = vcmpeq_usps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x19 = vcmpnge_uqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x1a = vcmpngt_uqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x1b = vcmpfalse_osps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x1c = vcmpneq_osps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x1d = vcmpge_oqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x1e = vcmpgt_oqps %xmm1, %xmm2, %xmm3
+0xc5,0xe8,0xc2,0xd9,0x1f = vcmptrue_usps %xmm1, %xmm2, %xmm3
+0xc5,0xfc,0x28,0x10 = vmovaps (%eax), %ymm2
+0xc5,0xfc,0x28,0xd1 = vmovaps %ymm1, %ymm2
+0xc5,0xfc,0x29,0x08 = vmovaps %ymm1, (%eax)
+0xc5,0xfd,0x28,0x10 = vmovapd (%eax), %ymm2
+0xc5,0xfd,0x28,0xd1 = vmovapd %ymm1, %ymm2
+0xc5,0xfd,0x29,0x08 = vmovapd %ymm1, (%eax)
+0xc5,0xfc,0x10,0x10 = vmovups (%eax), %ymm2
+0xc5,0xfc,0x10,0xd1 = vmovups %ymm1, %ymm2
+0xc5,0xfc,0x11,0x08 = vmovups %ymm1, (%eax)
+0xc5,0xfd,0x10,0x10 = vmovupd (%eax), %ymm2
+0xc5,0xfd,0x10,0xd1 = vmovupd %ymm1, %ymm2
+0xc5,0xfd,0x11,0x08 = vmovupd %ymm1, (%eax)
+0xc5,0xec,0x15,0xe1 = vunpckhps %ymm1, %ymm2, %ymm4
+0xc5,0xed,0x15,0xe1 = vunpckhpd %ymm1, %ymm2, %ymm4
+0xc5,0xec,0x14,0xe1 = vunpcklps %ymm1, %ymm2, %ymm4
+0xc5,0xed,0x14,0xe1 = vunpcklpd %ymm1, %ymm2, %ymm4
+0xc5,0xec,0x15,0x6c,0xcb,0xfc = vunpckhps -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xed,0x15,0x6c,0xcb,0xfc = vunpckhpd -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xec,0x14,0x6c,0xcb,0xfc = vunpcklps -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xed,0x14,0x6c,0xcb,0xfc = vunpcklpd -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xfd,0xe7,0x08 = vmovntdq %ymm1, (%eax)
+0xc5,0xfd,0x2b,0x08 = vmovntpd %ymm1, (%eax)
+0xc5,0xfc,0x2b,0x08 = vmovntps %ymm1, (%eax)
+0xc5,0xf8,0x50,0xc2 = vmovmskps %xmm2, %eax
+0xc5,0xf9,0x50,0xc2 = vmovmskpd %xmm2, %eax
+0xc5,0xdc,0x5f,0xf2 = vmaxps %ymm2, %ymm4, %ymm6
+0xc5,0xdd,0x5f,0xf2 = vmaxpd %ymm2, %ymm4, %ymm6
+0xc5,0xdc,0x5d,0xf2 = vminps %ymm2, %ymm4, %ymm6
+0xc5,0xdd,0x5d,0xf2 = vminpd %ymm2, %ymm4, %ymm6
+0xc5,0xdc,0x5c,0xf2 = vsubps %ymm2, %ymm4, %ymm6
+0xc5,0xdd,0x5c,0xf2 = vsubpd %ymm2, %ymm4, %ymm6
+0xc5,0xdc,0x5e,0xf2 = vdivps %ymm2, %ymm4, %ymm6
+0xc5,0xdd,0x5e,0xf2 = vdivpd %ymm2, %ymm4, %ymm6
+0xc5,0xdc,0x58,0xf2 = vaddps %ymm2, %ymm4, %ymm6
+0xc5,0xdd,0x58,0xf2 = vaddpd %ymm2, %ymm4, %ymm6
+0xc5,0xdc,0x59,0xf2 = vmulps %ymm2, %ymm4, %ymm6
+0xc5,0xdd,0x59,0xf2 = vmulpd %ymm2, %ymm4, %ymm6
+0xc5,0xdc,0x5f,0x30 = vmaxps (%eax), %ymm4, %ymm6
+0xc5,0xdd,0x5f,0x30 = vmaxpd (%eax), %ymm4, %ymm6
+0xc5,0xdc,0x5d,0x30 = vminps (%eax), %ymm4, %ymm6
+0xc5,0xdd,0x5d,0x30 = vminpd (%eax), %ymm4, %ymm6
+0xc5,0xdc,0x5c,0x30 = vsubps (%eax), %ymm4, %ymm6
+0xc5,0xdd,0x5c,0x30 = vsubpd (%eax), %ymm4, %ymm6
+0xc5,0xdc,0x5e,0x30 = vdivps (%eax), %ymm4, %ymm6
+0xc5,0xdd,0x5e,0x30 = vdivpd (%eax), %ymm4, %ymm6
+0xc5,0xdc,0x58,0x30 = vaddps (%eax), %ymm4, %ymm6
+0xc5,0xdd,0x58,0x30 = vaddpd (%eax), %ymm4, %ymm6
+0xc5,0xdc,0x59,0x30 = vmulps (%eax), %ymm4, %ymm6
+0xc5,0xdd,0x59,0x30 = vmulpd (%eax), %ymm4, %ymm6
+0xc5,0xfd,0x51,0xd1 = vsqrtpd %ymm1, %ymm2
+0xc5,0xfd,0x51,0x10 = vsqrtpd (%eax), %ymm2
+0xc5,0xfc,0x51,0xd1 = vsqrtps %ymm1, %ymm2
+0xc5,0xfc,0x51,0x10 = vsqrtps (%eax), %ymm2
+0xc5,0xfc,0x52,0xd1 = vrsqrtps %ymm1, %ymm2
+0xc5,0xfc,0x52,0x10 = vrsqrtps (%eax), %ymm2
+0xc5,0xfc,0x53,0xd1 = vrcpps %ymm1, %ymm2
+0xc5,0xfc,0x53,0x10 = vrcpps (%eax), %ymm2
+0xc5,0xdc,0x54,0xf2 = vandps %ymm2, %ymm4, %ymm6
+0xc5,0xdd,0x54,0xf2 = vandpd %ymm2, %ymm4, %ymm6
+0xc5,0xec,0x54,0x6c,0xcb,0xfc = vandps -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xed,0x54,0x6c,0xcb,0xfc = vandpd -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xdc,0x56,0xf2 = vorps %ymm2, %ymm4, %ymm6
+0xc5,0xdd,0x56,0xf2 = vorpd %ymm2, %ymm4, %ymm6
+0xc5,0xec,0x56,0x6c,0xcb,0xfc = vorps -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xed,0x56,0x6c,0xcb,0xfc = vorpd -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xdc,0x57,0xf2 = vxorps %ymm2, %ymm4, %ymm6
+0xc5,0xdd,0x57,0xf2 = vxorpd %ymm2, %ymm4, %ymm6
+0xc5,0xec,0x57,0x6c,0xcb,0xfc = vxorps -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xed,0x57,0x6c,0xcb,0xfc = vxorpd -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xdc,0x55,0xf2 = vandnps %ymm2, %ymm4, %ymm6
+0xc5,0xdd,0x55,0xf2 = vandnpd %ymm2, %ymm4, %ymm6
+0xc5,0xec,0x55,0x6c,0xcb,0xfc = vandnps -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xed,0x55,0x6c,0xcb,0xfc = vandnpd -4(%ebx, %ecx, 8), %ymm2, %ymm5
+0xc5,0xfc,0x5a,0xd3 = vcvtps2pd %xmm3, %ymm2
+0xc5,0xfc,0x5a,0x10 = vcvtps2pd (%eax), %ymm2
+0xc5,0xfe,0xe6,0xd3 = vcvtdq2pd %xmm3, %ymm2
+0xc5,0xfe,0xe6,0x10 = vcvtdq2pd (%eax), %ymm2
+0xc5,0xfc,0x5b,0xea = vcvtdq2ps %ymm2, %ymm5
+0xc5,0xfc,0x5b,0x10 = vcvtdq2ps (%eax), %ymm2
+0xc5,0xfd,0x5b,0xea = vcvtps2dq %ymm2, %ymm5
+0xc5,0xfd,0x5b,0x28 = vcvtps2dq (%eax), %ymm5
+0xc5,0xfe,0x5b,0xea = vcvttps2dq %ymm2, %ymm5
+0xc5,0xfe,0x5b,0x28 = vcvttps2dq (%eax), %ymm5
+0xc5,0xf9,0xe6,0xe9 = vcvttpd2dq %xmm1, %xmm5
+0xc5,0xfd,0xe6,0xea = vcvttpd2dq %ymm2, %xmm5
+0xc5,0xf9,0xe6,0xe9 = vcvttpd2dq %xmm1, %xmm5
+0xc5,0xf9,0xe6,0x08 = vcvttpd2dqx (%eax), %xmm1
+0xc5,0xfd,0xe6,0xca = vcvttpd2dq %ymm2, %xmm1
+0xc5,0xfd,0xe6,0x08 = vcvttpd2dqy (%eax), %xmm1
+0xc5,0xfd,0x5a,0xea = vcvtpd2ps %ymm2, %xmm5
+0xc5,0xf9,0x5a,0xe9 = vcvtpd2ps %xmm1, %xmm5
+0xc5,0xf9,0x5a,0x08 = vcvtpd2psx (%eax), %xmm1
+0xc5,0xfd,0x5a,0xca = vcvtpd2ps %ymm2, %xmm1
+0xc5,0xfd,0x5a,0x08 = vcvtpd2psy (%eax), %xmm1
+0xc5,0xff,0xe6,0xea = vcvtpd2dq %ymm2, %xmm5
+0xc5,0xff,0xe6,0xca = vcvtpd2dq %ymm2, %xmm1
+0xc5,0xff,0xe6,0x08 = vcvtpd2dqy (%eax), %xmm1
+0xc5,0xfb,0xe6,0xe9 = vcvtpd2dq %xmm1, %xmm5
+0xc5,0xfb,0xe6,0x08 = vcvtpd2dqx (%eax), %xmm1
+0xc5,0xec,0xc2,0xd9,0x00 = vcmpeqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x02 = vcmpleps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x01 = vcmpltps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x04 = vcmpneqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x06 = vcmpnleps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x05 = vcmpnltps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x07 = vcmpordps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x03 = vcmpunordps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqps -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x02 = vcmpleps -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltps -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqps -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnleps -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltps -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xcc,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordps -4(%ebx, %ecx, 8), %ymm6, %ymm2
+0xc5,0xec,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordps -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xed,0xc2,0xd9,0x00 = vcmpeqpd %ymm1, %ymm2, %ymm3
+0xc5,0xed,0xc2,0xd9,0x02 = vcmplepd %ymm1, %ymm2, %ymm3
+0xc5,0xed,0xc2,0xd9,0x01 = vcmpltpd %ymm1, %ymm2, %ymm3
+0xc5,0xed,0xc2,0xd9,0x04 = vcmpneqpd %ymm1, %ymm2, %ymm3
+0xc5,0xed,0xc2,0xd9,0x06 = vcmpnlepd %ymm1, %ymm2, %ymm3
+0xc5,0xed,0xc2,0xd9,0x05 = vcmpnltpd %ymm1, %ymm2, %ymm3
+0xc5,0xed,0xc2,0xd9,0x07 = vcmpordpd %ymm1, %ymm2, %ymm3
+0xc5,0xed,0xc2,0xd9,0x03 = vcmpunordpd %ymm1, %ymm2, %ymm3
+0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x00 = vcmpeqpd -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x02 = vcmplepd -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x01 = vcmpltpd -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x04 = vcmpneqpd -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x06 = vcmpnlepd -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x05 = vcmpnltpd -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xcd,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordpd -4(%ebx, %ecx, 8), %ymm6, %ymm2
+0xc5,0xed,0xc2,0x5c,0xcb,0xfc,0x03 = vcmpunordpd -4(%ebx, %ecx, 8), %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x08 = vcmpeq_uqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x09 = vcmpngeps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x0a = vcmpngtps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x0b = vcmpfalseps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x0c = vcmpneq_oqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x0d = vcmpgeps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x0e = vcmpgtps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x0f = vcmptrueps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x10 = vcmpeq_osps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x11 = vcmplt_oqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x12 = vcmple_oqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x13 = vcmpunord_sps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x14 = vcmpneq_usps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x15 = vcmpnlt_uqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x16 = vcmpnle_uqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x17 = vcmpord_sps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x18 = vcmpeq_usps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x19 = vcmpnge_uqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x1a = vcmpngt_uqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x1b = vcmpfalse_osps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x1c = vcmpneq_osps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x1d = vcmpge_oqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x1e = vcmpgt_oqps %ymm1, %ymm2, %ymm3
+0xc5,0xec,0xc2,0xd9,0x1f = vcmptrue_usps %ymm1, %ymm2, %ymm3
+0xc5,0xef,0xd0,0xd9 = vaddsubps %ymm1, %ymm2, %ymm3
+0xc5,0xf7,0xd0,0x10 = vaddsubps (%eax), %ymm1, %ymm2
+0xc5,0xed,0xd0,0xd9 = vaddsubpd %ymm1, %ymm2, %ymm3
+0xc5,0xf5,0xd0,0x10 = vaddsubpd (%eax), %ymm1, %ymm2
+0xc5,0xef,0x7c,0xd9 = vhaddps %ymm1, %ymm2, %ymm3
+0xc5,0xef,0x7c,0x18 = vhaddps (%eax), %ymm2, %ymm3
+0xc5,0xed,0x7c,0xd9 = vhaddpd %ymm1, %ymm2, %ymm3
+0xc5,0xed,0x7c,0x18 = vhaddpd (%eax), %ymm2, %ymm3
+0xc5,0xef,0x7d,0xd9 = vhsubps %ymm1, %ymm2, %ymm3
+0xc5,0xef,0x7d,0x18 = vhsubps (%eax), %ymm2, %ymm3
+0xc5,0xed,0x7d,0xd9 = vhsubpd %ymm1, %ymm2, %ymm3
+0xc5,0xed,0x7d,0x18 = vhsubpd (%eax), %ymm2, %ymm3
+0xc4,0xe3,0x55,0x0c,0xca,0x03 = vblendps $3, %ymm2, %ymm5, %ymm1
+0xc4,0xe3,0x55,0x0c,0x08,0x03 = vblendps $3, (%eax), %ymm5, %ymm1
+0xc4,0xe3,0x55,0x0d,0xca,0x03 = vblendpd $3, %ymm2, %ymm5, %ymm1
+0xc4,0xe3,0x55,0x0d,0x08,0x03 = vblendpd $3, (%eax), %ymm5, %ymm1
+0xc4,0xe3,0x55,0x40,0xca,0x03 = vdpps $3, %ymm2, %ymm5, %ymm1
+0xc4,0xe3,0x55,0x40,0x08,0x03 = vdpps $3, (%eax), %ymm5, %ymm1
+0xc4,0xe2,0x7d,0x1a,0x10 = vbroadcastf128 (%eax), %ymm2
+0xc4,0xe2,0x7d,0x19,0x10 = vbroadcastsd (%eax), %ymm2
+0xc4,0xe2,0x79,0x18,0x10 = vbroadcastss (%eax), %xmm2
+0xc4,0xe2,0x7d,0x18,0x10 = vbroadcastss (%eax), %ymm2
+0xc4,0xe3,0x6d,0x18,0xea,0x07 = vinsertf128 $7, %xmm2, %ymm2, %ymm5
+0xc4,0xe3,0x6d,0x18,0x28,0x07 = vinsertf128 $7, (%eax), %ymm2, %ymm5
+0xc4,0xe3,0x7d,0x19,0xd2,0x07 = vextractf128 $7, %ymm2, %xmm2
+0xc4,0xe3,0x7d,0x19,0x10,0x07 = vextractf128 $7, %ymm2, (%eax)
+0xc4,0xe2,0x51,0x2f,0x10 = vmaskmovpd %xmm2, %xmm5, (%eax)
+0xc4,0xe2,0x55,0x2f,0x10 = vmaskmovpd %ymm2, %ymm5, (%eax)
+0xc4,0xe2,0x69,0x2d,0x28 = vmaskmovpd (%eax), %xmm2, %xmm5
+0xc4,0xe2,0x6d,0x2d,0x28 = vmaskmovpd (%eax), %ymm2, %ymm5
+0xc4,0xe2,0x51,0x2e,0x10 = vmaskmovps %xmm2, %xmm5, (%eax)
+0xc4,0xe2,0x55,0x2e,0x10 = vmaskmovps %ymm2, %ymm5, (%eax)
+0xc4,0xe2,0x69,0x2c,0x28 = vmaskmovps (%eax), %xmm2, %xmm5
+0xc4,0xe2,0x6d,0x2c,0x28 = vmaskmovps (%eax), %ymm2, %ymm5
+0xc4,0xe3,0x79,0x04,0xe9,0x07 = vpermilps $7, %xmm1, %xmm5
+0xc4,0xe3,0x7d,0x04,0xcd,0x07 = vpermilps $7, %ymm5, %ymm1
+0xc4,0xe3,0x79,0x04,0x28,0x07 = vpermilps $7, (%eax), %xmm5
+0xc4,0xe3,0x7d,0x04,0x28,0x07 = vpermilps $7, (%eax), %ymm5
+0xc4,0xe2,0x51,0x0c,0xc9 = vpermilps %xmm1, %xmm5, %xmm1
+0xc4,0xe2,0x55,0x0c,0xc9 = vpermilps %ymm1, %ymm5, %ymm1
+0xc4,0xe2,0x51,0x0c,0x18 = vpermilps (%eax), %xmm5, %xmm3
+0xc4,0xe2,0x55,0x0c,0x08 = vpermilps (%eax), %ymm5, %ymm1
+0xc4,0xe3,0x79,0x05,0xe9,0x07 = vpermilpd $7, %xmm1, %xmm5
+0xc4,0xe3,0x7d,0x05,0xcd,0x07 = vpermilpd $7, %ymm5, %ymm1
+0xc4,0xe3,0x79,0x05,0x28,0x07 = vpermilpd $7, (%eax), %xmm5
+0xc4,0xe3,0x7d,0x05,0x28,0x07 = vpermilpd $7, (%eax), %ymm5
+0xc4,0xe2,0x51,0x0d,0xc9 = vpermilpd %xmm1, %xmm5, %xmm1
+0xc4,0xe2,0x55,0x0d,0xc9 = vpermilpd %ymm1, %ymm5, %ymm1
+0xc4,0xe2,0x51,0x0d,0x18 = vpermilpd (%eax), %xmm5, %xmm3
+0xc4,0xe2,0x55,0x0d,0x08 = vpermilpd (%eax), %ymm5, %ymm1
+0xc4,0xe3,0x55,0x06,0xca,0x07 = vperm2f128 $7, %ymm2, %ymm5, %ymm1
+0xc4,0xe3,0x55,0x06,0x08,0x07 = vperm2f128 $7, (%eax), %ymm5, %ymm1
+0xc5,0xfc,0x77 = vzeroall
+0xc5,0xf8,0x77 = vzeroupper
+0xc5,0xfb,0x2d,0xcc = vcvtsd2si %xmm4, %ecx
+0xc5,0xfb,0x2d,0x09 = vcvtsd2si (%ecx), %ecx
+0xc5,0xfb,0x2d,0xcc = vcvtsd2si %xmm4, %ecx
+0xc5,0xfb,0x2d,0x09 = vcvtsd2si (%ecx), %ecx
+0xc5,0xfb,0x2a,0x7d,0x00 = vcvtsi2sdl (%ebp), %xmm0, %xmm7
+0xc5,0xfb,0x2a,0x3c,0x24 = vcvtsi2sdl (%esp), %xmm0, %xmm7
+0xc5,0xfb,0x2a,0x7d,0x00 = vcvtsi2sdl (%ebp), %xmm0, %xmm7
+0xc5,0xfb,0x2a,0x3c,0x24 = vcvtsi2sdl (%esp), %xmm0, %xmm7
+0xc5,0xff,0xf0,0x10 = vlddqu (%eax), %ymm2
+0xc5,0xff,0x12,0xea = vmovddup %ymm2, %ymm5
+0xc5,0xff,0x12,0x10 = vmovddup (%eax), %ymm2
+0xc5,0xfd,0x6f,0xea = vmovdqa %ymm2, %ymm5
+0xc5,0xfd,0x7f,0x10 = vmovdqa %ymm2, (%eax)
+0xc5,0xfd,0x6f,0x10 = vmovdqa (%eax), %ymm2
+0xc5,0xfe,0x6f,0xea = vmovdqu %ymm2, %ymm5
+0xc5,0xfe,0x7f,0x10 = vmovdqu %ymm2, (%eax)
+0xc5,0xfe,0x6f,0x10 = vmovdqu (%eax), %ymm2
+0xc5,0xfe,0x16,0xea = vmovshdup %ymm2, %ymm5
+0xc5,0xfe,0x16,0x10 = vmovshdup (%eax), %ymm2
+0xc5,0xfe,0x12,0xea = vmovsldup %ymm2, %ymm5
+0xc5,0xfe,0x12,0x10 = vmovsldup (%eax), %ymm2
+0xc4,0xe2,0x7d,0x17,0xea = vptest %ymm2, %ymm5
+0xc4,0xe2,0x7d,0x17,0x10 = vptest (%eax), %ymm2
+0xc4,0xe3,0x7d,0x09,0xcd,0x07 = vroundpd $7, %ymm5, %ymm1
+0xc4,0xe3,0x7d,0x09,0x28,0x07 = vroundpd $7, (%eax), %ymm5
+0xc4,0xe3,0x7d,0x08,0xcd,0x07 = vroundps $7, %ymm5, %ymm1
+0xc4,0xe3,0x7d,0x08,0x28,0x07 = vroundps $7, (%eax), %ymm5
+0xc5,0xd5,0xc6,0xca,0x07 = vshufpd $7, %ymm2, %ymm5, %ymm1
+0xc5,0xd5,0xc6,0x08,0x07 = vshufpd $7, (%eax), %ymm5, %ymm1
+0xc5,0xd4,0xc6,0xca,0x07 = vshufps $7, %ymm2, %ymm5, %ymm1
+0xc5,0xd4,0xc6,0x08,0x07 = vshufps $7, (%eax), %ymm5, %ymm1
+0xc4,0xe2,0x79,0x0f,0xea = vtestpd %xmm2, %xmm5
+0xc4,0xe2,0x7d,0x0f,0xea = vtestpd %ymm2, %ymm5
+0xc4,0xe2,0x79,0x0f,0x10 = vtestpd (%eax), %xmm2
+0xc4,0xe2,0x7d,0x0f,0x10 = vtestpd (%eax), %ymm2
+0xc4,0xe2,0x79,0x0e,0xea = vtestps %xmm2, %xmm5
+0xc4,0xe2,0x7d,0x0e,0xea = vtestps %ymm2, %ymm5
+0xc4,0xe2,0x79,0x0e,0x10 = vtestps (%eax), %xmm2
+0xc4,0xe2,0x7d,0x0e,0x10 = vtestps (%eax), %ymm2
+0xc4,0xe3,0x75,0x4b,0x94,0x20,0xad,0xde,0x00,0x00,0x00 = vblendvpd %ymm0, 0xdead(%eax), %ymm1, %ymm2
+// 0xc4,0xe3,0x51,0x44,0xca,0x11 = vpclmulhqhqdq %xmm2, %xmm5, %xmm1
+// 0xc4,0xe3,0x51,0x44,0x18,0x11 = vpclmulhqhqdq (%eax), %xmm5, %xmm3
+// 0xc4,0xe3,0x51,0x44,0xca,0x01 = vpclmulhqlqdq %xmm2, %xmm5, %xmm1
+// 0xc4,0xe3,0x51,0x44,0x18,0x01 = vpclmulhqlqdq (%eax), %xmm5, %xmm3
+// 0xc4,0xe3,0x51,0x44,0xca,0x10 = vpclmullqhqdq %xmm2, %xmm5, %xmm1
+// 0xc4,0xe3,0x51,0x44,0x18,0x10 = vpclmullqhqdq (%eax), %xmm5, %xmm3
+// 0xc4,0xe3,0x51,0x44,0xca,0x00 = vpclmullqlqdq %xmm2, %xmm5, %xmm1
+// 0xc4,0xe3,0x51,0x44,0x18,0x00 = vpclmullqlqdq (%eax), %xmm5, %xmm3
+0xc4,0xe3,0x51,0x44,0xca,0x11 = vpclmulqdq $17, %xmm2, %xmm5, %xmm1
+0xc4,0xe3,0x51,0x44,0x18,0x11 = vpclmulqdq $17, (%eax), %xmm5, %xmm3
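Vectors prefixed with "//" are disabled expectations. In the file above, the vpclmul{lq,hq}{lq,hq}dq alias spellings are commented out while the same byte patterns stay active as plain vpclmulqdq with an explicit selector immediate, which suggests Capstone prints only the canonical mnemonic. A small sketch, under the same Python-binding assumption as earlier, to inspect what Capstone emits for one of those encodings:

# Disassemble the bytes behind the disabled "vpclmulhqhqdq" line; the
# active test line expects the canonical "vpclmulqdq $17" spelling.
from capstone import Cs, CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT
md = Cs(CS_ARCH_X86, CS_MODE_32)
md.syntax = CS_OPT_SYNTAX_ATT
insn = next(md.disasm(b"\xc4\xe3\x51\x44\xca\x11", 0))
print(insn.mnemonic, insn.op_str)  # expected: vpclmulqdq with the 0x11 selector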
vfmadd132ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0x98,0x08 = vfmadd132ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xa8,0xca = vfmadd213pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xa8,0x08 = vfmadd213pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xa8,0xca = vfmadd213ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xa8,0x08 = vfmadd213ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xb8,0xca = vfmadd231pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xb8,0x08 = vfmadd231pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xb8,0xca = vfmadd231ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xb8,0x08 = vfmadd231ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0x96,0xca = vfmaddsub132pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0x96,0x08 = vfmaddsub132pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0x96,0xca = vfmaddsub132ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0x96,0x08 = vfmaddsub132ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xa6,0xca = vfmaddsub213pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xa6,0x08 = vfmaddsub213pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xa6,0xca = vfmaddsub213ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xa6,0x08 = vfmaddsub213ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xb6,0xca = vfmaddsub231pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xb6,0x08 = vfmaddsub231pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xb6,0xca = vfmaddsub231ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xb6,0x08 = vfmaddsub231ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0x97,0xca = vfmsubadd132pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0x97,0x08 = vfmsubadd132pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0x97,0xca = vfmsubadd132ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0x97,0x08 = vfmsubadd132ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xa7,0xca = vfmsubadd213pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xa7,0x08 = vfmsubadd213pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xa7,0xca = vfmsubadd213ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xa7,0x08 = vfmsubadd213ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xb7,0xca = vfmsubadd231pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xb7,0x08 = vfmsubadd231pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xb7,0xca = vfmsubadd231ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xb7,0x08 = vfmsubadd231ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0x9a,0xca = vfmsub132pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0x9a,0x08 = vfmsub132pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0x9a,0xca = vfmsub132ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0x9a,0x08 = vfmsub132ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xaa,0xca = vfmsub213pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xaa,0x08 = vfmsub213pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xaa,0xca = vfmsub213ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xaa,0x08 = vfmsub213ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xba,0xca = vfmsub231pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xba,0x08 = vfmsub231pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xba,0xca = vfmsub231ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xba,0x08 = vfmsub231ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0x9c,0xca = vfnmadd132pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0x9c,0x08 = vfnmadd132pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0x9c,0xca = vfnmadd132ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0x9c,0x08 = vfnmadd132ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xac,0xca = vfnmadd213pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xac,0x08 = vfnmadd213pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xac,0xca = vfnmadd213ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xac,0x08 = vfnmadd213ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xbc,0xca = vfnmadd231pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xbc,0x08 = vfnmadd231pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xbc,0xca = vfnmadd231ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xbc,0x08 = vfnmadd231ps (%eax), 
%xmm5, %xmm1 +0xc4,0xe2,0xd1,0x9e,0xca = vfnmsub132pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0x9e,0x08 = vfnmsub132pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0x9e,0xca = vfnmsub132ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0x9e,0x08 = vfnmsub132ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xae,0xca = vfnmsub213pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xae,0x08 = vfnmsub213pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xae,0xca = vfnmsub213ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xae,0x08 = vfnmsub213ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xbe,0xca = vfnmsub231pd %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0xd1,0xbe,0x08 = vfnmsub231pd (%eax), %xmm5, %xmm1 +0xc4,0xe2,0x51,0xbe,0xca = vfnmsub231ps %xmm2, %xmm5, %xmm1 +0xc4,0xe2,0x51,0xbe,0x08 = vfnmsub231ps (%eax), %xmm5, %xmm1 +0xc4,0xe2,0xd5,0x98,0xca = vfmadd132pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0x98,0x08 = vfmadd132pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0x98,0xca = vfmadd132ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0x98,0x08 = vfmadd132ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xa8,0xca = vfmadd213pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xa8,0x08 = vfmadd213pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xa8,0xca = vfmadd213ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xa8,0x08 = vfmadd213ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xb8,0xca = vfmadd231pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xb8,0x08 = vfmadd231pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xb8,0xca = vfmadd231ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xb8,0x08 = vfmadd231ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0x96,0xca = vfmaddsub132pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0x96,0x08 = vfmaddsub132pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0x96,0xca = vfmaddsub132ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0x96,0x08 = vfmaddsub132ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xa6,0xca = vfmaddsub213pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xa6,0x08 = vfmaddsub213pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xa6,0xca = vfmaddsub213ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xa6,0x08 = vfmaddsub213ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xb6,0xca = vfmaddsub231pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xb6,0x08 = vfmaddsub231pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xb6,0xca = vfmaddsub231ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xb6,0x08 = vfmaddsub231ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0x97,0xca = vfmsubadd132pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0x97,0x08 = vfmsubadd132pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0x97,0xca = vfmsubadd132ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0x97,0x08 = vfmsubadd132ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xa7,0xca = vfmsubadd213pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xa7,0x08 = vfmsubadd213pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xa7,0xca = vfmsubadd213ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xa7,0x08 = vfmsubadd213ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xb7,0xca = vfmsubadd231pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xb7,0x08 = vfmsubadd231pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xb7,0xca = vfmsubadd231ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xb7,0x08 = vfmsubadd231ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0x9a,0xca = vfmsub132pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0x9a,0x08 = vfmsub132pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0x9a,0xca = vfmsub132ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0x9a,0x08 = vfmsub132ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xaa,0xca = vfmsub213pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xaa,0x08 = vfmsub213pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xaa,0xca = vfmsub213ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xaa,0x08 = vfmsub213ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xba,0xca = vfmsub231pd %ymm2, %ymm5, %ymm1 
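
All of the FMA3 vectors here share a 3-byte VEX prefix, and the third prefix byte alone accounts for the pd/ps and xmm/ymm variants: W picks the double-precision forms, L the 256-bit forms, and the inverted vvvv field names the middle source register. A minimal standalone sketch of that byte's layout (plain Python, not part of the suite's tooling):

def vex3_byte3(b):
    # Third byte of a 3-byte VEX prefix: W | ~vvvv | L | pp.
    w = (b >> 7) & 1          # W=1 -> the *pd forms in this file
    vvvv = ~(b >> 3) & 0xF    # middle source register, stored inverted
    l = (b >> 2) & 1          # L=1 -> ymm, L=0 -> xmm
    pp = b & 0x3              # implied prefix; 0b01 = 0x66
    return w, vvvv, l, pp

# 0xd5 from "0xc4,0xe2,0xd5,0x9e,0xca = vfnmsub132pd %ymm2, %ymm5, %ymm1":
assert vex3_byte3(0xd5) == (1, 5, 1, 1)   # pd, %ymm5, 256-bit, 0x66
assert vex3_byte3(0x55) == (0, 5, 1, 1)   # same fields with W=0 -> the ps form
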
+0xc4,0xe2,0xd5,0xba,0x08 = vfmsub231pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xba,0xca = vfmsub231ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xba,0x08 = vfmsub231ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0x9c,0xca = vfnmadd132pd %ymm2, %ymm5, %ymm1 +// 0xc4,0xe2,0xd5,0x9c,0x08 = vfnmadd132pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0x9c,0xca = vfnmadd132ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0x9c,0x08 = vfnmadd132ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xac,0xca = vfnmadd213pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xac,0x08 = vfnmadd213pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xac,0xca = vfnmadd213ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xac,0x08 = vfnmadd213ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xbc,0xca = vfnmadd231pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xbc,0x08 = vfnmadd231pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xbc,0xca = vfnmadd231ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xbc,0x08 = vfnmadd231ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0x9e,0xca = vfnmsub132pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0x9e,0x08 = vfnmsub132pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0x9e,0xca = vfnmsub132ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0x9e,0x08 = vfnmsub132ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xae,0xca = vfnmsub213pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xae,0x08 = vfnmsub213pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xae,0xca = vfnmsub213ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xae,0x08 = vfnmsub213ps (%eax), %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xbe,0xca = vfnmsub231pd %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0xd5,0xbe,0x08 = vfnmsub231pd (%eax), %ymm5, %ymm1 +0xc4,0xe2,0x55,0xbe,0xca = vfnmsub231ps %ymm2, %ymm5, %ymm1 +0xc4,0xe2,0x55,0xbe,0x08 = vfnmsub231ps (%eax), %ymm5, %ymm1 diff --git a/capstone/suite/MC/X86/x86-32-ms-inline-asm.s.cs b/capstone/suite/MC/X86/x86-32-ms-inline-asm.s.cs new file mode 100644 index 000000000..6c201df97 --- /dev/null +++ b/capstone/suite/MC/X86/x86-32-ms-inline-asm.s.cs @@ -0,0 +1,27 @@ +# CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT +0x8b,0x03 = movl (%ebx), %eax +0x89,0x4b,0x04 = movl %ecx, 4(%ebx) +0x8b,0x04,0x85,0x04,0x00,0x00,0x00 = movl 4(, %eax, 4), %eax +0x8b,0x04,0x85,0x04,0x00,0x00,0x00 = movl 4(, %eax, 4), %eax +0x8b,0x04,0x06 = movl (%esi, %eax), %eax +0x8b,0x04,0x06 = movl (%esi, %eax), %eax +0x8b,0x04,0x86 = movl (%esi, %eax, 4), %eax +0x8b,0x04,0x86 = movl (%esi, %eax, 4), %eax +0x8b,0x44,0x06,0x04 = movl 4(%esi, %eax), %eax +0x8b,0x44,0x06,0x04 = movl 4(%esi, %eax), %eax +0x8b,0x44,0x06,0x04 = movl 4(%esi, %eax), %eax +0x8b,0x44,0x06,0x04 = movl 4(%esi, %eax), %eax +0x8b,0x44,0x46,0x04 = movl 4(%esi, %eax, 2), %eax +0x8b,0x44,0x46,0x04 = movl 4(%esi, %eax, 2), %eax +0x8b,0x44,0x46,0x04 = movl 4(%esi, %eax, 2), %eax +0x8b,0x44,0x46,0x04 = movl 4(%esi, %eax, 2), %eax +0x8b,0x44,0x46,0x08 = movl 8(%esi, %eax, 2), %eax +0x8b,0x44,0x46,0x08 = movl 8(%esi, %eax, 2), %eax +0x8b,0x44,0x46,0x08 = movl 8(%esi, %eax, 2), %eax +0x8b,0x44,0x46,0x08 = movl 8(%esi, %eax, 2), %eax +0x8b,0x44,0x46,0x10 = movl 16(%esi, %eax, 2), %eax +0x0f,0x18,0x40,0x40 = prefetchnta 64(%eax) +0x60 = pushal +0x61 = popal +0x60 = pushal +0x61 = popal diff --git a/capstone/suite/MC/X86/x86_64-avx-clmul-encoding.s.cs b/capstone/suite/MC/X86/x86_64-avx-clmul-encoding.s.cs new file mode 100644 index 000000000..e5d631e89 --- /dev/null +++ b/capstone/suite/MC/X86/x86_64-avx-clmul-encoding.s.cs @@ -0,0 +1,11 @@ +# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT +// 0xc4,0x43,0x29,0x44,0xdc,0x11 = vpclmulhqhqdq %xmm12, %xmm10, %xmm11 +// 0xc4,0x63,0x29,0x44,0x28,0x11 = vpclmulhqhqdq (%rax), %xmm10, %xmm13 +// 
0xc4,0x43,0x29,0x44,0xdc,0x01 = vpclmulhqlqdq %xmm12, %xmm10, %xmm11 +// 0xc4,0x63,0x29,0x44,0x28,0x01 = vpclmulhqlqdq (%rax), %xmm10, %xmm13 +// 0xc4,0x43,0x29,0x44,0xdc,0x10 = vpclmullqhqdq %xmm12, %xmm10, %xmm11 +// 0xc4,0x63,0x29,0x44,0x28,0x10 = vpclmullqhqdq (%rax), %xmm10, %xmm13 +// 0xc4,0x43,0x29,0x44,0xdc,0x00 = vpclmullqlqdq %xmm12, %xmm10, %xmm11 +// 0xc4,0x63,0x29,0x44,0x28,0x00 = vpclmullqlqdq (%rax), %xmm10, %xmm13 +0xc4,0x43,0x29,0x44,0xdc,0x11 = vpclmulqdq $17, %xmm12, %xmm10, %xmm11 +0xc4,0x63,0x29,0x44,0x28,0x11 = vpclmulqdq $17, (%rax), %xmm10, %xmm13 diff --git a/capstone/suite/MC/X86/x86_64-avx-encoding.s.cs b/capstone/suite/MC/X86/x86_64-avx-encoding.s.cs new file mode 100644 index 000000000..13d688400 --- /dev/null +++ b/capstone/suite/MC/X86/x86_64-avx-encoding.s.cs @@ -0,0 +1,1058 @@ +# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT +0xc4,0x41,0x32,0x58,0xd0 = vaddss %xmm8, %xmm9, %xmm10 +0xc4,0x41,0x32,0x59,0xd0 = vmulss %xmm8, %xmm9, %xmm10 +0xc4,0x41,0x32,0x5c,0xd0 = vsubss %xmm8, %xmm9, %xmm10 +0xc4,0x41,0x32,0x5e,0xd0 = vdivss %xmm8, %xmm9, %xmm10 +0xc4,0x41,0x33,0x58,0xd0 = vaddsd %xmm8, %xmm9, %xmm10 +0xc4,0x41,0x33,0x59,0xd0 = vmulsd %xmm8, %xmm9, %xmm10 +0xc4,0x41,0x33,0x5c,0xd0 = vsubsd %xmm8, %xmm9, %xmm10 +0xc4,0x41,0x33,0x5e,0xd0 = vdivsd %xmm8, %xmm9, %xmm10 +0xc5,0x2a,0x58,0x5c,0xd9,0xfc = vaddss -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x2a,0x5c,0x5c,0xd9,0xfc = vsubss -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x2a,0x59,0x5c,0xd9,0xfc = vmulss -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x2a,0x5e,0x5c,0xd9,0xfc = vdivss -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x2b,0x58,0x5c,0xd9,0xfc = vaddsd -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x2b,0x5c,0x5c,0xd9,0xfc = vsubsd -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x2b,0x59,0x5c,0xd9,0xfc = vmulsd -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x2b,0x5e,0x5c,0xd9,0xfc = vdivsd -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc4,0x41,0x20,0x58,0xfa = vaddps %xmm10, %xmm11, %xmm15 +0xc4,0x41,0x20,0x5c,0xfa = vsubps %xmm10, %xmm11, %xmm15 +0xc4,0x41,0x20,0x59,0xfa = vmulps %xmm10, %xmm11, %xmm15 +0xc4,0x41,0x20,0x5e,0xfa = vdivps %xmm10, %xmm11, %xmm15 +0xc4,0x41,0x21,0x58,0xfa = vaddpd %xmm10, %xmm11, %xmm15 +0xc4,0x41,0x21,0x5c,0xfa = vsubpd %xmm10, %xmm11, %xmm15 +0xc4,0x41,0x21,0x59,0xfa = vmulpd %xmm10, %xmm11, %xmm15 +0xc4,0x41,0x21,0x5e,0xfa = vdivpd %xmm10, %xmm11, %xmm15 +0xc5,0x28,0x58,0x5c,0xd9,0xfc = vaddps -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x28,0x5c,0x5c,0xd9,0xfc = vsubps -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x28,0x59,0x5c,0xd9,0xfc = vmulps -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x28,0x5e,0x5c,0xd9,0xfc = vdivps -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x29,0x58,0x5c,0xd9,0xfc = vaddpd -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x29,0x5c,0x5c,0xd9,0xfc = vsubpd -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x29,0x59,0x5c,0xd9,0xfc = vmulpd -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc5,0x29,0x5e,0x5c,0xd9,0xfc = vdivpd -4(%rcx, %rbx, 8), %xmm10, %xmm11 +0xc4,0x41,0x0a,0x5f,0xe2 = vmaxss %xmm10, %xmm14, %xmm12 +0xc4,0x41,0x0b,0x5f,0xe2 = vmaxsd %xmm10, %xmm14, %xmm12 +0xc4,0x41,0x0a,0x5d,0xe2 = vminss %xmm10, %xmm14, %xmm12 +0xc4,0x41,0x0b,0x5d,0xe2 = vminsd %xmm10, %xmm14, %xmm12 +0xc5,0x1a,0x5f,0x54,0xcb,0xfc = vmaxss -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc5,0x1b,0x5f,0x54,0xcb,0xfc = vmaxsd -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc5,0x1a,0x5d,0x54,0xcb,0xfc = vminss -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc5,0x1b,0x5d,0x54,0xcb,0xfc = vminsd -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc4,0x41,0x08,0x5f,0xe2 = vmaxps %xmm10, %xmm14, %xmm12 
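
Each file opens with a `#` line naming the Capstone arch/mode/syntax under which its vectors are checked, and every following line maps an opcode byte sequence to the expected AT&T disassembly; `//` lines are disabled cases, such as the vpclmul pseudo-op aliases above, which the disassembler prints in the generic vpclmulqdq form instead. A minimal sketch of replaying one vector through the Python bindings (assuming the `capstone` package is installed; this is not the suite's actual harness):

from capstone import Cs, CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT

line = "0xc4,0x41,0x32,0x58,0xd0 = vaddss %xmm8, %xmm9, %xmm10"
encoding, expected = (s.strip() for s in line.split(" = "))
code = bytes(int(b, 16) for b in encoding.split(","))

md = Cs(CS_ARCH_X86, CS_MODE_64)
md.syntax = CS_OPT_SYNTAX_ATT
insn = next(md.disasm(code, 0))
produced = f"{insn.mnemonic} {insn.op_str}"
# Whitespace conventions differ between files, so compare space-free.
assert produced.replace(" ", "") == expected.replace(" ", "")
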
+0xc4,0x41,0x09,0x5f,0xe2 = vmaxpd %xmm10, %xmm14, %xmm12 +0xc4,0x41,0x08,0x5d,0xe2 = vminps %xmm10, %xmm14, %xmm12 +0xc4,0x41,0x09,0x5d,0xe2 = vminpd %xmm10, %xmm14, %xmm12 +0xc5,0x18,0x5f,0x54,0xcb,0xfc = vmaxps -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc5,0x19,0x5f,0x54,0xcb,0xfc = vmaxpd -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc5,0x18,0x5d,0x54,0xcb,0xfc = vminps -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc5,0x19,0x5d,0x54,0xcb,0xfc = vminpd -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc4,0x41,0x08,0x54,0xe2 = vandps %xmm10, %xmm14, %xmm12 +0xc4,0x41,0x09,0x54,0xe2 = vandpd %xmm10, %xmm14, %xmm12 +0xc5,0x18,0x54,0x54,0xcb,0xfc = vandps -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc5,0x19,0x54,0x54,0xcb,0xfc = vandpd -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc4,0x41,0x08,0x56,0xe2 = vorps %xmm10, %xmm14, %xmm12 +0xc4,0x41,0x09,0x56,0xe2 = vorpd %xmm10, %xmm14, %xmm12 +0xc5,0x18,0x56,0x54,0xcb,0xfc = vorps -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc5,0x19,0x56,0x54,0xcb,0xfc = vorpd -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc4,0x41,0x08,0x57,0xe2 = vxorps %xmm10, %xmm14, %xmm12 +0xc4,0x41,0x09,0x57,0xe2 = vxorpd %xmm10, %xmm14, %xmm12 +0xc5,0x18,0x57,0x54,0xcb,0xfc = vxorps -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc5,0x19,0x57,0x54,0xcb,0xfc = vxorpd -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc4,0x41,0x08,0x55,0xe2 = vandnps %xmm10, %xmm14, %xmm12 +0xc4,0x41,0x09,0x55,0xe2 = vandnpd %xmm10, %xmm14, %xmm12 +0xc5,0x18,0x55,0x54,0xcb,0xfc = vandnps -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc5,0x19,0x55,0x54,0xcb,0xfc = vandnpd -4(%rbx, %rcx, 8), %xmm12, %xmm10 +0xc5,0x7a,0x10,0x54,0xcb,0xfc = vmovss -4(%rbx, %rcx, 8), %xmm10 +0xc4,0x41,0x2a,0x10,0xfe = vmovss %xmm14, %xmm10, %xmm15 +0xc5,0x7b,0x10,0x54,0xcb,0xfc = vmovsd -4(%rbx, %rcx, 8), %xmm10 +0xc4,0x41,0x2b,0x10,0xfe = vmovsd %xmm14, %xmm10, %xmm15 +0xc4,0x41,0x18,0x15,0xef = vunpckhps %xmm15, %xmm12, %xmm13 +0xc4,0x41,0x19,0x15,0xef = vunpckhpd %xmm15, %xmm12, %xmm13 +0xc4,0x41,0x18,0x14,0xef = vunpcklps %xmm15, %xmm12, %xmm13 +0xc4,0x41,0x19,0x14,0xef = vunpcklpd %xmm15, %xmm12, %xmm13 +0xc5,0x18,0x15,0x7c,0xcb,0xfc = vunpckhps -4(%rbx, %rcx, 8), %xmm12, %xmm15 +0xc5,0x19,0x15,0x7c,0xcb,0xfc = vunpckhpd -4(%rbx, %rcx, 8), %xmm12, %xmm15 +0xc5,0x18,0x14,0x7c,0xcb,0xfc = vunpcklps -4(%rbx, %rcx, 8), %xmm12, %xmm15 +0xc5,0x19,0x14,0x7c,0xcb,0xfc = vunpcklpd -4(%rbx, %rcx, 8), %xmm12, %xmm15 +0xc4,0x41,0x18,0xc2,0xfa,0x00 = vcmpeqps %xmm10, %xmm12, %xmm15 +0xc5,0x18,0xc2,0x38,0x00 = vcmpeqps (%rax), %xmm12, %xmm15 +0xc4,0x41,0x18,0xc2,0xfa,0x07 = vcmpordps %xmm10, %xmm12, %xmm15 +0xc4,0x41,0x19,0xc2,0xfa,0x00 = vcmpeqpd %xmm10, %xmm12, %xmm15 +0xc5,0x19,0xc2,0x38,0x00 = vcmpeqpd (%rax), %xmm12, %xmm15 +0xc4,0x41,0x19,0xc2,0xfa,0x07 = vcmpordpd %xmm10, %xmm12, %xmm15 +0xc4,0x41,0x18,0xc6,0xeb,0x08 = vshufps $8, %xmm11, %xmm12, %xmm13 +0xc5,0x18,0xc6,0x6c,0xcb,0xfc,0x08 = vshufps $8, -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc4,0x41,0x19,0xc6,0xeb,0x08 = vshufpd $8, %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xc6,0x6c,0xcb,0xfc,0x08 = vshufpd $8, -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x00 = vcmpeqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x02 = vcmpleps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x01 = vcmpltps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x04 = vcmpneqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x06 = vcmpnleps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x05 = vcmpnltps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x07 = vcmpordps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x03 = vcmpunordps %xmm11, %xmm12, %xmm13 
+0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x02 = vcmpleps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnleps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordps -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x00 = vcmpeqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x02 = vcmplepd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x01 = vcmpltpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x04 = vcmpneqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x06 = vcmpnlepd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x05 = vcmpnltpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x07 = vcmpordpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x03 = vcmpunordpd %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x02 = vcmplepd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnlepd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordpd -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x00 = vcmpeqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x02 = vcmpless %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x01 = vcmpltss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x04 = vcmpneqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x06 = vcmpnless %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x05 = vcmpnltss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x07 = vcmpordss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x03 = vcmpunordss %xmm11, %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x02 = vcmpless -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnless -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordss -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x00 = vcmpeqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x02 = vcmplesd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x01 = vcmpltsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x04 = vcmpneqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x06 = vcmpnlesd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x05 = vcmpnltsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x07 = vcmpordsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x03 = vcmpunordsd %xmm11, %xmm12, %xmm13 
+0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x02 = vcmplesd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnlesd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x07 = vcmpordsd -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x08 = vcmpeq_uqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x09 = vcmpngeps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x0a = vcmpngtps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x0b = vcmpfalseps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x0c = vcmpneq_oqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x0d = vcmpgeps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x0e = vcmpgtps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x0f = vcmptrueps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x10 = vcmpeq_osps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x11 = vcmplt_oqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x12 = vcmple_oqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x13 = vcmpunord_sps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x14 = vcmpneq_usps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x15 = vcmpnlt_uqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x16 = vcmpnle_uqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x17 = vcmpord_sps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x18 = vcmpeq_usps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x19 = vcmpnge_uqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x1a = vcmpngt_uqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x1b = vcmpfalse_osps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x1c = vcmpneq_osps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x1d = vcmpge_oqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x1e = vcmpgt_oqps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0xc2,0xeb,0x1f = vcmptrue_usps %xmm11, %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x08 = vcmpeq_uqps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x09 = vcmpngeps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x0a = vcmpngtps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x0b = vcmpfalseps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x0c = vcmpneq_oqps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x0d = vcmpgeps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x0e = vcmpgtps -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x0f = vcmptrueps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x10 = vcmpeq_osps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x11 = vcmplt_oqps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x12 = vcmple_oqps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x13 = vcmpunord_sps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x14 = vcmpneq_usps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x15 = vcmpnlt_uqps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x16 = vcmpnle_uqps -4(%rbx, %rcx, 8), %xmm6, %xmm2 
+0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x17 = vcmpord_sps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x18 = vcmpeq_usps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x19 = vcmpnge_uqps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x1a = vcmpngt_uqps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x1b = vcmpfalse_osps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x1c = vcmpneq_osps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x1d = vcmpge_oqps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xc8,0xc2,0x54,0xcb,0xfc,0x1e = vcmpgt_oqps -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x18,0xc2,0x6c,0xcb,0xfc,0x1f = vcmptrue_usps -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x08 = vcmpeq_uqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x09 = vcmpngepd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x0a = vcmpngtpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x0b = vcmpfalsepd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x0c = vcmpneq_oqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x0d = vcmpgepd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x0e = vcmpgtpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x0f = vcmptruepd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x10 = vcmpeq_ospd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x11 = vcmplt_oqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x12 = vcmple_oqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x13 = vcmpunord_spd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x14 = vcmpneq_uspd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x15 = vcmpnlt_uqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x16 = vcmpnle_uqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x17 = vcmpord_spd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x18 = vcmpeq_uspd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x19 = vcmpnge_uqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x1a = vcmpngt_uqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x1b = vcmpfalse_ospd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x1c = vcmpneq_ospd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x1d = vcmpge_oqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x1e = vcmpgt_oqpd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x19,0xc2,0xeb,0x1f = vcmptrue_uspd %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x08 = vcmpeq_uqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x09 = vcmpngepd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x0a = vcmpngtpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x0b = vcmpfalsepd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x0c = vcmpneq_oqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x0d = vcmpgepd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x0e = vcmpgtpd -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x0f = vcmptruepd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x10 = vcmpeq_ospd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x11 = vcmplt_oqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x12 = vcmple_oqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x13 = vcmpunord_spd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x14 = vcmpneq_uspd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x15 = vcmpnlt_uqpd -4(%rbx, %rcx, 8), 
%xmm12, %xmm13 +0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x16 = vcmpnle_uqpd -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x17 = vcmpord_spd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x18 = vcmpeq_uspd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x19 = vcmpnge_uqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x1a = vcmpngt_uqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x1b = vcmpfalse_ospd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x1c = vcmpneq_ospd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x1d = vcmpge_oqpd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xc9,0xc2,0x54,0xcb,0xfc,0x1e = vcmpgt_oqpd -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x19,0xc2,0x6c,0xcb,0xfc,0x1f = vcmptrue_uspd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x08 = vcmpeq_uqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x09 = vcmpngess %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x0a = vcmpngtss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x0b = vcmpfalsess %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x0c = vcmpneq_oqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x0d = vcmpgess %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x0e = vcmpgtss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x0f = vcmptruess %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x10 = vcmpeq_osss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x11 = vcmplt_oqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x12 = vcmple_oqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x13 = vcmpunord_sss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x14 = vcmpneq_usss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x15 = vcmpnlt_uqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x16 = vcmpnle_uqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x17 = vcmpord_sss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x18 = vcmpeq_usss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x19 = vcmpnge_uqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x1a = vcmpngt_uqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x1b = vcmpfalse_osss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x1c = vcmpneq_osss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x1d = vcmpge_oqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x1e = vcmpgt_oqss %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1a,0xc2,0xeb,0x1f = vcmptrue_usss %xmm11, %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x08 = vcmpeq_uqss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x09 = vcmpngess -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x0a = vcmpngtss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x0b = vcmpfalsess -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x0c = vcmpneq_oqss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x0d = vcmpgess -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x0e = vcmpgtss -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x0f = vcmptruess -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x10 = vcmpeq_osss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x11 = vcmplt_oqss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x12 = vcmple_oqss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x13 = vcmpunord_sss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x14 = vcmpneq_usss -4(%rbx, 
%rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x15 = vcmpnlt_uqss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x16 = vcmpnle_uqss -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x17 = vcmpord_sss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x18 = vcmpeq_usss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x19 = vcmpnge_uqss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x1a = vcmpngt_uqss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x1b = vcmpfalse_osss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x1c = vcmpneq_osss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x1d = vcmpge_oqss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xca,0xc2,0x54,0xcb,0xfc,0x1e = vcmpgt_oqss -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x1a,0xc2,0x6c,0xcb,0xfc,0x1f = vcmptrue_usss -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x08 = vcmpeq_uqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x09 = vcmpngesd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x0a = vcmpngtsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x0b = vcmpfalsesd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x0c = vcmpneq_oqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x0d = vcmpgesd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x0e = vcmpgtsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x0f = vcmptruesd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x10 = vcmpeq_ossd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x11 = vcmplt_oqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x12 = vcmple_oqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x13 = vcmpunord_ssd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x14 = vcmpneq_ussd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x15 = vcmpnlt_uqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x16 = vcmpnle_uqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x17 = vcmpord_ssd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x18 = vcmpeq_ussd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x19 = vcmpnge_uqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x1a = vcmpngt_uqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x1b = vcmpfalse_ossd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x1c = vcmpneq_ossd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x1d = vcmpge_oqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x1e = vcmpgt_oqsd %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x1b,0xc2,0xeb,0x1f = vcmptrue_ussd %xmm11, %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x08 = vcmpeq_uqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x09 = vcmpngesd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x0a = vcmpngtsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x0b = vcmpfalsesd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x0c = vcmpneq_oqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x0d = vcmpgesd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x0e = vcmpgtsd -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x0f = vcmptruesd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x10 = vcmpeq_ossd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x11 = vcmplt_oqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x12 = vcmple_oqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x13 = vcmpunord_ssd 
-4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x14 = vcmpneq_ussd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x15 = vcmpnlt_uqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x16 = vcmpnle_uqsd -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x17 = vcmpord_ssd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x18 = vcmpeq_ussd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x19 = vcmpnge_uqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x1a = vcmpngt_uqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x1b = vcmpfalse_ossd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x1c = vcmpneq_ossd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x1d = vcmpge_oqsd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc5,0xcb,0xc2,0x54,0xcb,0xfc,0x1e = vcmpgt_oqsd -4(%rbx, %rcx, 8), %xmm6, %xmm2 +0xc5,0x1b,0xc2,0x6c,0xcb,0xfc,0x1f = vcmptrue_ussd -4(%rbx, %rcx, 8), %xmm12, %xmm13 +0xc4,0x41,0x78,0x2e,0xe3 = vucomiss %xmm11, %xmm12 +0xc5,0x78,0x2e,0x20 = vucomiss (%rax), %xmm12 +0xc4,0x41,0x78,0x2f,0xe3 = vcomiss %xmm11, %xmm12 +0xc5,0x78,0x2f,0x20 = vcomiss (%rax), %xmm12 +0xc4,0x41,0x79,0x2e,0xe3 = vucomisd %xmm11, %xmm12 +0xc5,0x79,0x2e,0x20 = vucomisd (%rax), %xmm12 +0xc4,0x41,0x79,0x2f,0xe3 = vcomisd %xmm11, %xmm12 +0xc5,0x79,0x2f,0x20 = vcomisd (%rax), %xmm12 +0xc5,0xfa,0x2c,0x01 = vcvttss2si (%rcx), %eax +0xc5,0x22,0x2a,0x20 = vcvtsi2ssl (%rax), %xmm11, %xmm12 +0xc5,0x22,0x2a,0x20 = vcvtsi2ssl (%rax), %xmm11, %xmm12 +0xc5,0xfb,0x2c,0x01 = vcvttsd2si (%rcx), %eax +0xc5,0x23,0x2a,0x20 = vcvtsi2sdl (%rax), %xmm11, %xmm12 +0xc5,0x23,0x2a,0x20 = vcvtsi2sdl (%rax), %xmm11, %xmm12 +0xc5,0x78,0x28,0x20 = vmovaps (%rax), %xmm12 +0xc4,0x41,0x78,0x28,0xe3 = vmovaps %xmm11, %xmm12 +0xc5,0x78,0x29,0x18 = vmovaps %xmm11, (%rax) +0xc5,0x79,0x28,0x20 = vmovapd (%rax), %xmm12 +0xc4,0x41,0x79,0x28,0xe3 = vmovapd %xmm11, %xmm12 +0xc5,0x79,0x29,0x18 = vmovapd %xmm11, (%rax) +0xc5,0x78,0x10,0x20 = vmovups (%rax), %xmm12 +0xc4,0x41,0x78,0x10,0xe3 = vmovups %xmm11, %xmm12 +0xc5,0x78,0x11,0x18 = vmovups %xmm11, (%rax) +0xc5,0x79,0x10,0x20 = vmovupd (%rax), %xmm12 +0xc4,0x41,0x79,0x10,0xe3 = vmovupd %xmm11, %xmm12 +0xc5,0x79,0x11,0x18 = vmovupd %xmm11, (%rax) +0xc5,0x78,0x13,0x18 = vmovlps %xmm11, (%rax) +0xc5,0x18,0x12,0x28 = vmovlps (%rax), %xmm12, %xmm13 +0xc5,0x79,0x13,0x18 = vmovlpd %xmm11, (%rax) +0xc5,0x19,0x12,0x28 = vmovlpd (%rax), %xmm12, %xmm13 +0xc5,0x78,0x17,0x18 = vmovhps %xmm11, (%rax) +0xc5,0x18,0x16,0x28 = vmovhps (%rax), %xmm12, %xmm13 +0xc5,0x79,0x17,0x18 = vmovhpd %xmm11, (%rax) +0xc5,0x19,0x16,0x28 = vmovhpd (%rax), %xmm12, %xmm13 +0xc4,0x41,0x18,0x16,0xeb = vmovlhps %xmm11, %xmm12, %xmm13 +0xc4,0x41,0x18,0x12,0xeb = vmovhlps %xmm11, %xmm12, %xmm13 +0xc4,0xc1,0x7a,0x2d,0xc3 = vcvtss2si %xmm11, %eax +0xc5,0xfa,0x2d,0x18 = vcvtss2si (%rax), %ebx +0xc4,0x41,0x78,0x5b,0xe2 = vcvtdq2ps %xmm10, %xmm12 +0xc5,0x78,0x5b,0x20 = vcvtdq2ps (%rax), %xmm12 +0xc4,0x41,0x13,0x5a,0xd4 = vcvtsd2ss %xmm12, %xmm13, %xmm10 +0xc5,0x13,0x5a,0x10 = vcvtsd2ss (%rax), %xmm13, %xmm10 +0xc4,0x41,0x79,0x5b,0xdc = vcvtps2dq %xmm12, %xmm11 +0xc5,0x79,0x5b,0x18 = vcvtps2dq (%rax), %xmm11 +0xc4,0x41,0x12,0x5a,0xd4 = vcvtss2sd %xmm12, %xmm13, %xmm10 +0xc5,0x12,0x5a,0x10 = vcvtss2sd (%rax), %xmm13, %xmm10 +0xc4,0x41,0x78,0x5b,0xd5 = vcvtdq2ps %xmm13, %xmm10 +0xc5,0x78,0x5b,0x29 = vcvtdq2ps (%rcx), %xmm13 +0xc4,0x41,0x7a,0x5b,0xdc = vcvttps2dq %xmm12, %xmm11 
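
The vcmp* blocks above enumerate all 32 comparison predicates; the suffix in the mnemonic is purely a function of the trailing immediate (0x00-0x07 are the legacy SSE predicates, 0x08-0x1f the AVX extensions). Sketched as a lookup, with the predicate names exactly as they appear in these vectors:

VCMP_PREDS = [
    "eq", "lt", "le", "unord", "neq", "nlt", "nle", "ord",
    "eq_uq", "nge", "ngt", "false", "neq_oq", "ge", "gt", "true",
    "eq_os", "lt_oq", "le_oq", "unord_s", "neq_us", "nlt_uq", "nle_uq", "ord_s",
    "eq_us", "nge_uq", "ngt_uq", "false_os", "neq_os", "ge_oq", "gt_oq", "true_us",
]

def vcmp_mnemonic(imm8, suffix):
    # imm8=0x07, "ps" -> "vcmpordps"; imm8=0x1f, "sd" -> "vcmptrue_ussd"
    return f"vcmp{VCMP_PREDS[imm8]}{suffix}"

assert vcmp_mnemonic(0x07, "ps") == "vcmpordps"
assert vcmp_mnemonic(0x1f, "sd") == "vcmptrue_ussd"
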
+0xc5,0x7a,0x5b,0x18 = vcvttps2dq (%rax), %xmm11 +0xc4,0x41,0x78,0x5a,0xdc = vcvtps2pd %xmm12, %xmm11 +0xc5,0x78,0x5a,0x18 = vcvtps2pd (%rax), %xmm11 +0xc4,0x41,0x79,0x5a,0xdc = vcvtpd2ps %xmm12, %xmm11 +0xc4,0x41,0x79,0x51,0xe3 = vsqrtpd %xmm11, %xmm12 +0xc5,0x79,0x51,0x20 = vsqrtpd (%rax), %xmm12 +0xc4,0x41,0x78,0x51,0xe3 = vsqrtps %xmm11, %xmm12 +0xc5,0x78,0x51,0x20 = vsqrtps (%rax), %xmm12 +0xc4,0x41,0x1b,0x51,0xd3 = vsqrtsd %xmm11, %xmm12, %xmm10 +0xc5,0x1b,0x51,0x10 = vsqrtsd (%rax), %xmm12, %xmm10 +0xc4,0x41,0x1a,0x51,0xd3 = vsqrtss %xmm11, %xmm12, %xmm10 +0xc5,0x1a,0x51,0x10 = vsqrtss (%rax), %xmm12, %xmm10 +0xc4,0x41,0x78,0x52,0xe3 = vrsqrtps %xmm11, %xmm12 +0xc5,0x78,0x52,0x20 = vrsqrtps (%rax), %xmm12 +0xc4,0x41,0x1a,0x52,0xd3 = vrsqrtss %xmm11, %xmm12, %xmm10 +0xc5,0x1a,0x52,0x10 = vrsqrtss (%rax), %xmm12, %xmm10 +0xc4,0x41,0x78,0x53,0xe3 = vrcpps %xmm11, %xmm12 +0xc5,0x78,0x53,0x20 = vrcpps (%rax), %xmm12 +0xc4,0x41,0x1a,0x53,0xd3 = vrcpss %xmm11, %xmm12, %xmm10 +0xc5,0x1a,0x53,0x10 = vrcpss (%rax), %xmm12, %xmm10 +0xc5,0x79,0xe7,0x18 = vmovntdq %xmm11, (%rax) +0xc5,0x79,0x2b,0x18 = vmovntpd %xmm11, (%rax) +0xc5,0x78,0x2b,0x18 = vmovntps %xmm11, (%rax) +0xc5,0xf8,0xae,0x15,0xfc,0xff,0xff,0xff = vldmxcsr -4(%rip) +0xc5,0xf8,0xae,0x5c,0x24,0xfc = vstmxcsr -4(%rsp) +0xc4,0x41,0x19,0xf8,0xeb = vpsubb %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xf8,0x28 = vpsubb (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xf9,0xeb = vpsubw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xf9,0x28 = vpsubw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xfa,0xeb = vpsubd %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xfa,0x28 = vpsubd (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xfb,0xeb = vpsubq %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xfb,0x28 = vpsubq (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xe8,0xeb = vpsubsb %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xe8,0x28 = vpsubsb (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xe9,0xeb = vpsubsw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xe9,0x28 = vpsubsw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xd8,0xeb = vpsubusb %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xd8,0x28 = vpsubusb (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xd9,0xeb = vpsubusw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xd9,0x28 = vpsubusw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xfc,0xeb = vpaddb %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xfc,0x28 = vpaddb (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xfd,0xeb = vpaddw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xfd,0x28 = vpaddw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xfe,0xeb = vpaddd %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xfe,0x28 = vpaddd (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xd4,0xeb = vpaddq %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xd4,0x28 = vpaddq (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xec,0xeb = vpaddsb %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xec,0x28 = vpaddsb (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xed,0xeb = vpaddsw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xed,0x28 = vpaddsw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xdc,0xeb = vpaddusb %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xdc,0x28 = vpaddusb (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xdd,0xeb = vpaddusw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xdd,0x28 = vpaddusw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xe4,0xeb = vpmulhuw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xe4,0x28 = vpmulhuw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xe5,0xeb = vpmulhw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xe5,0x28 = vpmulhw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xd5,0xeb = vpmullw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xd5,0x28 = vpmullw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xf4,0xeb = vpmuludq %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xf4,0x28 = vpmuludq (%rax), %xmm12, %xmm13 
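
The recurring `0xc4,0x41,0x19,<op>,0xeb` pattern in the integer vectors above always reads `%xmm11, %xmm12, %xmm13` because the three operands come from independent fields: ModRM.rm (extended by VEX.B) is the first AT&T source, VEX.vvvv the second, and ModRM.reg (extended by VEX.R) the destination. A small sketch of that decode:

def vex_operands(vex_b2, vex_b3, modrm):
    r = (~vex_b2 >> 7) & 1         # VEX.R, stored inverted in bit 7
    b = (~vex_b2 >> 5) & 1         # VEX.B, stored inverted in bit 5
    vvvv = ~(vex_b3 >> 3) & 0xF    # second source, stored inverted
    reg = ((modrm >> 3) & 7) | (r << 3)
    rm = (modrm & 7) | (b << 3)
    return rm, vvvv, reg           # AT&T order: src2, src1, dst

# "0xc4,0x41,0x19,0xf8,0xeb = vpsubb %xmm11, %xmm12, %xmm13"
assert vex_operands(0x41, 0x19, 0xeb) == (11, 12, 13)
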
+0xc4,0x41,0x19,0xe0,0xeb = vpavgb %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xe0,0x28 = vpavgb (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xe3,0xeb = vpavgw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xe3,0x28 = vpavgw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xea,0xeb = vpminsw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xea,0x28 = vpminsw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xda,0xeb = vpminub %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xda,0x28 = vpminub (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xee,0xeb = vpmaxsw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xee,0x28 = vpmaxsw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xde,0xeb = vpmaxub %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xde,0x28 = vpmaxub (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xf6,0xeb = vpsadbw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xf6,0x28 = vpsadbw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xf1,0xeb = vpsllw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xf1,0x28 = vpsllw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xf2,0xeb = vpslld %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xf2,0x28 = vpslld (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xf3,0xeb = vpsllq %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xf3,0x28 = vpsllq (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xe1,0xeb = vpsraw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xe1,0x28 = vpsraw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xe2,0xeb = vpsrad %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xe2,0x28 = vpsrad (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xd1,0xeb = vpsrlw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xd1,0x28 = vpsrlw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xd2,0xeb = vpsrld %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xd2,0x28 = vpsrld (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xd3,0xeb = vpsrlq %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xd3,0x28 = vpsrlq (%rax), %xmm12, %xmm13 +0xc4,0xc1,0x11,0x72,0xf4,0x0a = vpslld $10, %xmm12, %xmm13 +0xc4,0xc1,0x11,0x73,0xfc,0x0a = vpslldq $10, %xmm12, %xmm13 +0xc4,0xc1,0x11,0x73,0xf4,0x0a = vpsllq $10, %xmm12, %xmm13 +0xc4,0xc1,0x11,0x71,0xf4,0x0a = vpsllw $10, %xmm12, %xmm13 +0xc4,0xc1,0x11,0x72,0xe4,0x0a = vpsrad $10, %xmm12, %xmm13 +0xc4,0xc1,0x11,0x71,0xe4,0x0a = vpsraw $10, %xmm12, %xmm13 +0xc4,0xc1,0x11,0x72,0xd4,0x0a = vpsrld $10, %xmm12, %xmm13 +0xc4,0xc1,0x11,0x73,0xdc,0x0a = vpsrldq $10, %xmm12, %xmm13 +0xc4,0xc1,0x11,0x73,0xd4,0x0a = vpsrlq $10, %xmm12, %xmm13 +0xc4,0xc1,0x11,0x71,0xd4,0x0a = vpsrlw $10, %xmm12, %xmm13 +0xc4,0xc1,0x11,0x72,0xf4,0x0a = vpslld $10, %xmm12, %xmm13 +0xc4,0x41,0x19,0xdb,0xeb = vpand %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xdb,0x28 = vpand (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xeb,0xeb = vpor %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xeb,0x28 = vpor (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xef,0xeb = vpxor %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xef,0x28 = vpxor (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0xdf,0xeb = vpandn %xmm11, %xmm12, %xmm13 +0xc5,0x19,0xdf,0x28 = vpandn (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x74,0xeb = vpcmpeqb %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x74,0x28 = vpcmpeqb (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x75,0xeb = vpcmpeqw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x75,0x28 = vpcmpeqw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x76,0xeb = vpcmpeqd %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x76,0x28 = vpcmpeqd (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x64,0xeb = vpcmpgtb %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x64,0x28 = vpcmpgtb (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x65,0xeb = vpcmpgtw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x65,0x28 = vpcmpgtw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x66,0xeb = vpcmpgtd %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x66,0x28 = vpcmpgtd (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x63,0xeb = vpacksswb %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x63,0x28 = 
vpacksswb (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x6b,0xeb = vpackssdw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x6b,0x28 = vpackssdw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x67,0xeb = vpackuswb %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x67,0x28 = vpackuswb (%rax), %xmm12, %xmm13 +0xc4,0x41,0x79,0x70,0xec,0x04 = vpshufd $4, %xmm12, %xmm13 +0xc5,0x79,0x70,0x28,0x04 = vpshufd $4, (%rax), %xmm13 +0xc4,0x41,0x7a,0x70,0xec,0x04 = vpshufhw $4, %xmm12, %xmm13 +0xc5,0x7a,0x70,0x28,0x04 = vpshufhw $4, (%rax), %xmm13 +0xc4,0x41,0x7b,0x70,0xec,0x04 = vpshuflw $4, %xmm12, %xmm13 +0xc5,0x7b,0x70,0x28,0x04 = vpshuflw $4, (%rax), %xmm13 +0xc4,0x41,0x19,0x60,0xeb = vpunpcklbw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x60,0x28 = vpunpcklbw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x61,0xeb = vpunpcklwd %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x61,0x28 = vpunpcklwd (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x62,0xeb = vpunpckldq %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x62,0x28 = vpunpckldq (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x6c,0xeb = vpunpcklqdq %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x6c,0x28 = vpunpcklqdq (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x68,0xeb = vpunpckhbw %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x68,0x28 = vpunpckhbw (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x69,0xeb = vpunpckhwd %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x69,0x28 = vpunpckhwd (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x6a,0xeb = vpunpckhdq %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x6a,0x28 = vpunpckhdq (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x6d,0xeb = vpunpckhqdq %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x6d,0x28 = vpunpckhqdq (%rax), %xmm12, %xmm13 +0xc5,0x19,0xc4,0xe8,0x07 = vpinsrw $7, %eax, %xmm12, %xmm13 +0xc5,0x19,0xc4,0x28,0x07 = vpinsrw $7, (%rax), %xmm12, %xmm13 +0xc4,0xc1,0x79,0xc5,0xc4,0x07 = vpextrw $7, %xmm12, %eax +0xc4,0xc1,0x79,0xd7,0xc4 = vpmovmskb %xmm12, %eax +0xc4,0x41,0x79,0xf7,0xfe = vmaskmovdqu %xmm14, %xmm15 +0xc5,0x79,0x6e,0xf0 = vmovd %eax, %xmm14 +0xc5,0x79,0x6e,0x30 = vmovd (%rax), %xmm14 +0xc5,0x79,0x7e,0x30 = vmovd %xmm14, (%rax) +0xc4,0x61,0xf9,0x6e,0xf0 = vmovq %rax, %xmm14 +0xc4,0xe1,0xf9,0x7e,0xc0 = vmovq %xmm0, %rax +0xc5,0x79,0xd6,0x30 = vmovq %xmm14, (%rax) +0xc4,0x41,0x7a,0x7e,0xe6 = vmovq %xmm14, %xmm12 +0xc5,0x7a,0x7e,0x30 = vmovq (%rax), %xmm14 +0xc4,0x61,0xf9,0x6e,0xf0 = vmovq %rax, %xmm14 +0xc4,0x61,0xf9,0x7e,0xf0 = vmovq %xmm14, %rax +0xc4,0x41,0x7b,0xe6,0xe3 = vcvtpd2dq %xmm11, %xmm12 +0xc4,0x41,0x7a,0xe6,0xe3 = vcvtdq2pd %xmm11, %xmm12 +0xc5,0x7a,0xe6,0x20 = vcvtdq2pd (%rax), %xmm12 +0xc4,0x41,0x7a,0x16,0xe3 = vmovshdup %xmm11, %xmm12 +0xc5,0x7a,0x16,0x20 = vmovshdup (%rax), %xmm12 +0xc4,0x41,0x7a,0x12,0xe3 = vmovsldup %xmm11, %xmm12 +0xc5,0x7a,0x12,0x20 = vmovsldup (%rax), %xmm12 +0xc4,0x41,0x7b,0x12,0xe3 = vmovddup %xmm11, %xmm12 +0xc5,0x7b,0x12,0x20 = vmovddup (%rax), %xmm12 +0xc4,0x41,0x1b,0xd0,0xeb = vaddsubps %xmm11, %xmm12, %xmm13 +0xc5,0x23,0xd0,0x20 = vaddsubps (%rax), %xmm11, %xmm12 +0xc4,0x41,0x19,0xd0,0xeb = vaddsubpd %xmm11, %xmm12, %xmm13 +0xc5,0x21,0xd0,0x20 = vaddsubpd (%rax), %xmm11, %xmm12 +0xc4,0x41,0x1b,0x7c,0xeb = vhaddps %xmm11, %xmm12, %xmm13 +0xc5,0x1b,0x7c,0x28 = vhaddps (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x7c,0xeb = vhaddpd %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x7c,0x28 = vhaddpd (%rax), %xmm12, %xmm13 +0xc4,0x41,0x1b,0x7d,0xeb = vhsubps %xmm11, %xmm12, %xmm13 +0xc5,0x1b,0x7d,0x28 = vhsubps (%rax), %xmm12, %xmm13 +0xc4,0x41,0x19,0x7d,0xeb = vhsubpd %xmm11, %xmm12, %xmm13 +0xc5,0x19,0x7d,0x28 = vhsubpd (%rax), %xmm12, %xmm13 +0xc4,0x42,0x79,0x1c,0xe3 = vpabsb %xmm11, %xmm12 +0xc4,0x62,0x79,0x1c,0x20 = vpabsb (%rax), %xmm12 
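
Note the split between `0xc5` (2-byte VEX) and `0xc4` (3-byte VEX) openings in these vectors: the 2-byte form hardwires the 0F opcode map and W=0 and carries only the R extension, so everything from the 0F38 map (vpabs*, vphadd*, vpshufb, ...) and every W=1 or B/X-extended operand forces the 3-byte form. A sketch of that rule:

def vex_prefix_length(opcode_map, w=False, x=False, b=False):
    # 2-byte VEX (0xc5) exists only for map 0F with W=0 and no X/B extension;
    # 0F38/0F3A ops and extended index/base registers need 3-byte VEX (0xc4).
    return 2 if opcode_map == "0F" and not (w or x or b) else 3

assert vex_prefix_length("0F") == 2          # e.g. 0xc5,0x19,0xf8,0x28 = vpsubb (%rax), ...
assert vex_prefix_length("0F38") == 3        # e.g. 0xc4,0x62,0x79,0x1c,0x20 = vpabsb (%rax), ...
assert vex_prefix_length("0F", b=True) == 3  # rm in xmm8..xmm15, e.g. vpsubb %xmm11, ...
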
+0xc4,0x42,0x79,0x1d,0xe3 = vpabsw %xmm11, %xmm12 +0xc4,0x62,0x79,0x1d,0x20 = vpabsw (%rax), %xmm12 +0xc4,0x42,0x79,0x1e,0xe3 = vpabsd %xmm11, %xmm12 +0xc4,0x62,0x79,0x1e,0x20 = vpabsd (%rax), %xmm12 +0xc4,0x42,0x19,0x01,0xeb = vphaddw %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x01,0x28 = vphaddw (%rax), %xmm12, %xmm13 +0xc4,0x42,0x19,0x02,0xeb = vphaddd %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x02,0x28 = vphaddd (%rax), %xmm12, %xmm13 +0xc4,0x42,0x19,0x03,0xeb = vphaddsw %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x03,0x28 = vphaddsw (%rax), %xmm12, %xmm13 +0xc4,0x42,0x19,0x05,0xeb = vphsubw %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x05,0x28 = vphsubw (%rax), %xmm12, %xmm13 +0xc4,0x42,0x19,0x06,0xeb = vphsubd %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x06,0x28 = vphsubd (%rax), %xmm12, %xmm13 +0xc4,0x42,0x19,0x07,0xeb = vphsubsw %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x07,0x28 = vphsubsw (%rax), %xmm12, %xmm13 +0xc4,0x42,0x19,0x04,0xeb = vpmaddubsw %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x04,0x28 = vpmaddubsw (%rax), %xmm12, %xmm13 +0xc4,0x42,0x19,0x00,0xeb = vpshufb %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x00,0x28 = vpshufb (%rax), %xmm12, %xmm13 +0xc4,0x42,0x19,0x08,0xeb = vpsignb %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x08,0x28 = vpsignb (%rax), %xmm12, %xmm13 +0xc4,0x42,0x19,0x09,0xeb = vpsignw %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x09,0x28 = vpsignw (%rax), %xmm12, %xmm13 +0xc4,0x42,0x19,0x0a,0xeb = vpsignd %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x0a,0x28 = vpsignd (%rax), %xmm12, %xmm13 +0xc4,0x42,0x19,0x0b,0xeb = vpmulhrsw %xmm11, %xmm12, %xmm13 +0xc4,0x62,0x19,0x0b,0x28 = vpmulhrsw (%rax), %xmm12, %xmm13 +0xc4,0x43,0x19,0x0f,0xeb,0x07 = vpalignr $7, %xmm11, %xmm12, %xmm13 +0xc4,0x63,0x19,0x0f,0x28,0x07 = vpalignr $7, (%rax), %xmm12, %xmm13 +0xc4,0x43,0x19,0x0b,0xeb,0x07 = vroundsd $7, %xmm11, %xmm12, %xmm13 +0xc4,0x63,0x19,0x0b,0x28,0x07 = vroundsd $7, (%rax), %xmm12, %xmm13 +0xc4,0x43,0x19,0x0a,0xeb,0x07 = vroundss $7, %xmm11, %xmm12, %xmm13 +0xc4,0x63,0x19,0x0a,0x28,0x07 = vroundss $7, (%rax), %xmm12, %xmm13 +0xc4,0x43,0x79,0x09,0xec,0x07 = vroundpd $7, %xmm12, %xmm13 +0xc4,0x63,0x79,0x09,0x28,0x07 = vroundpd $7, (%rax), %xmm13 +0xc4,0x43,0x79,0x08,0xec,0x07 = vroundps $7, %xmm12, %xmm13 +0xc4,0x63,0x79,0x08,0x28,0x07 = vroundps $7, (%rax), %xmm13 +0xc4,0x42,0x79,0x41,0xec = vphminposuw %xmm12, %xmm13 +0xc4,0x62,0x79,0x41,0x20 = vphminposuw (%rax), %xmm12 +0xc4,0x42,0x11,0x2b,0xdc = vpackusdw %xmm12, %xmm13, %xmm11 +0xc4,0x62,0x19,0x2b,0x28 = vpackusdw (%rax), %xmm12, %xmm13 +0xc4,0x42,0x11,0x29,0xdc = vpcmpeqq %xmm12, %xmm13, %xmm11 +0xc4,0x62,0x19,0x29,0x28 = vpcmpeqq (%rax), %xmm12, %xmm13 +0xc4,0x42,0x11,0x38,0xdc = vpminsb %xmm12, %xmm13, %xmm11 +0xc4,0x62,0x19,0x38,0x28 = vpminsb (%rax), %xmm12, %xmm13 +0xc4,0x42,0x11,0x39,0xdc = vpminsd %xmm12, %xmm13, %xmm11 +0xc4,0x62,0x19,0x39,0x28 = vpminsd (%rax), %xmm12, %xmm13 +0xc4,0x42,0x11,0x3b,0xdc = vpminud %xmm12, %xmm13, %xmm11 +0xc4,0x62,0x19,0x3b,0x28 = vpminud (%rax), %xmm12, %xmm13 +0xc4,0x42,0x11,0x3a,0xdc = vpminuw %xmm12, %xmm13, %xmm11 +0xc4,0x62,0x19,0x3a,0x28 = vpminuw (%rax), %xmm12, %xmm13 +0xc4,0x42,0x11,0x3c,0xdc = vpmaxsb %xmm12, %xmm13, %xmm11 +0xc4,0x62,0x19,0x3c,0x28 = vpmaxsb (%rax), %xmm12, %xmm13 +0xc4,0x42,0x11,0x3d,0xdc = vpmaxsd %xmm12, %xmm13, %xmm11 +0xc4,0x62,0x19,0x3d,0x28 = vpmaxsd (%rax), %xmm12, %xmm13 +0xc4,0x42,0x11,0x3f,0xdc = vpmaxud %xmm12, %xmm13, %xmm11 +0xc4,0x62,0x19,0x3f,0x28 = vpmaxud (%rax), %xmm12, %xmm13 +0xc4,0x42,0x11,0x3e,0xdc = vpmaxuw %xmm12, %xmm13, %xmm11 +0xc4,0x62,0x19,0x3e,0x28 = 
vpmaxuw (%rax), %xmm12, %xmm13 +0xc4,0x42,0x11,0x28,0xdc = vpmuldq %xmm12, %xmm13, %xmm11 +0xc4,0x62,0x19,0x28,0x28 = vpmuldq (%rax), %xmm12, %xmm13 +0xc4,0x42,0x51,0x40,0xdc = vpmulld %xmm12, %xmm5, %xmm11 +0xc4,0x62,0x51,0x40,0x28 = vpmulld (%rax), %xmm5, %xmm13 +0xc4,0x43,0x51,0x0c,0xdc,0x03 = vblendps $3, %xmm12, %xmm5, %xmm11 +0xc4,0x63,0x51,0x0c,0x18,0x03 = vblendps $3, (%rax), %xmm5, %xmm11 +0xc4,0x43,0x51,0x0d,0xdc,0x03 = vblendpd $3, %xmm12, %xmm5, %xmm11 +0xc4,0x63,0x51,0x0d,0x18,0x03 = vblendpd $3, (%rax), %xmm5, %xmm11 +0xc4,0x43,0x51,0x0e,0xdc,0x03 = vpblendw $3, %xmm12, %xmm5, %xmm11 +0xc4,0x63,0x51,0x0e,0x18,0x03 = vpblendw $3, (%rax), %xmm5, %xmm11 +0xc4,0x43,0x51,0x42,0xdc,0x03 = vmpsadbw $3, %xmm12, %xmm5, %xmm11 +0xc4,0x63,0x51,0x42,0x18,0x03 = vmpsadbw $3, (%rax), %xmm5, %xmm11 +0xc4,0x43,0x51,0x40,0xdc,0x03 = vdpps $3, %xmm12, %xmm5, %xmm11 +0xc4,0x63,0x51,0x40,0x18,0x03 = vdpps $3, (%rax), %xmm5, %xmm11 +0xc4,0x43,0x51,0x41,0xdc,0x03 = vdppd $3, %xmm12, %xmm5, %xmm11 +0xc4,0x63,0x51,0x41,0x18,0x03 = vdppd $3, (%rax), %xmm5, %xmm11 +0xc4,0x63,0x21,0x4b,0xed,0xc0 = vblendvpd %xmm12, %xmm5, %xmm11, %xmm13 +0xc4,0x63,0x21,0x4b,0x28,0xc0 = vblendvpd %xmm12, (%rax), %xmm11, %xmm13 +0xc4,0x63,0x21,0x4a,0xed,0xc0 = vblendvps %xmm12, %xmm5, %xmm11, %xmm13 +0xc4,0x63,0x21,0x4a,0x28,0xc0 = vblendvps %xmm12, (%rax), %xmm11, %xmm13 +0xc4,0x63,0x21,0x4c,0xed,0xc0 = vpblendvb %xmm12, %xmm5, %xmm11, %xmm13 +0xc4,0x63,0x21,0x4c,0x28,0xc0 = vpblendvb %xmm12, (%rax), %xmm11, %xmm13 +0xc4,0x42,0x79,0x20,0xd4 = vpmovsxbw %xmm12, %xmm10 +0xc4,0x62,0x79,0x20,0x20 = vpmovsxbw (%rax), %xmm12 +0xc4,0x42,0x79,0x23,0xd4 = vpmovsxwd %xmm12, %xmm10 +0xc4,0x62,0x79,0x23,0x20 = vpmovsxwd (%rax), %xmm12 +0xc4,0x42,0x79,0x25,0xd4 = vpmovsxdq %xmm12, %xmm10 +0xc4,0x62,0x79,0x25,0x20 = vpmovsxdq (%rax), %xmm12 +0xc4,0x42,0x79,0x30,0xd4 = vpmovzxbw %xmm12, %xmm10 +0xc4,0x62,0x79,0x30,0x20 = vpmovzxbw (%rax), %xmm12 +0xc4,0x42,0x79,0x33,0xd4 = vpmovzxwd %xmm12, %xmm10 +0xc4,0x62,0x79,0x33,0x20 = vpmovzxwd (%rax), %xmm12 +0xc4,0x42,0x79,0x35,0xd4 = vpmovzxdq %xmm12, %xmm10 +0xc4,0x62,0x79,0x35,0x20 = vpmovzxdq (%rax), %xmm12 +0xc4,0x42,0x79,0x22,0xd4 = vpmovsxbq %xmm12, %xmm10 +0xc4,0x62,0x79,0x22,0x20 = vpmovsxbq (%rax), %xmm12 +0xc4,0x42,0x79,0x32,0xd4 = vpmovzxbq %xmm12, %xmm10 +0xc4,0x62,0x79,0x32,0x20 = vpmovzxbq (%rax), %xmm12 +0xc4,0x42,0x79,0x21,0xd4 = vpmovsxbd %xmm12, %xmm10 +0xc4,0x62,0x79,0x21,0x20 = vpmovsxbd (%rax), %xmm12 +0xc4,0x42,0x79,0x24,0xd4 = vpmovsxwq %xmm12, %xmm10 +0xc4,0x62,0x79,0x24,0x20 = vpmovsxwq (%rax), %xmm12 +0xc4,0x42,0x79,0x31,0xd4 = vpmovzxbd %xmm12, %xmm10 +0xc4,0x62,0x79,0x31,0x20 = vpmovzxbd (%rax), %xmm12 +0xc4,0x42,0x79,0x34,0xd4 = vpmovzxwq %xmm12, %xmm10 +0xc4,0x62,0x79,0x34,0x20 = vpmovzxwq (%rax), %xmm12 +0xc4,0xc1,0x79,0xc5,0xc4,0x07 = vpextrw $7, %xmm12, %eax +0xc4,0x63,0x79,0x15,0x20,0x07 = vpextrw $7, %xmm12, (%rax) +0xc4,0x63,0x79,0x16,0xe0,0x07 = vpextrd $7, %xmm12, %eax +0xc4,0x63,0x79,0x16,0x20,0x07 = vpextrd $7, %xmm12, (%rax) +0xc4,0x63,0x79,0x14,0xe0,0x07 = vpextrb $7, %xmm12, %eax +0xc4,0x63,0x79,0x14,0x20,0x07 = vpextrb $7, %xmm12, (%rax) +0xc4,0x63,0xf9,0x16,0xe1,0x07 = vpextrq $7, %xmm12, %rcx +0xc4,0x63,0xf9,0x16,0x21,0x07 = vpextrq $7, %xmm12, (%rcx) +0xc4,0x63,0x79,0x17,0x20,0x07 = vextractps $7, %xmm12, (%rax) +0xc4,0x63,0x79,0x17,0xe0,0x07 = vextractps $7, %xmm12, %eax +0xc5,0x19,0xc4,0xd0,0x07 = vpinsrw $7, %eax, %xmm12, %xmm10 +0xc5,0x19,0xc4,0x10,0x07 = vpinsrw $7, (%rax), %xmm12, %xmm10 +0xc4,0x63,0x19,0x20,0xd0,0x07 = vpinsrb $7, %eax, 
%xmm12, %xmm10
+0xc4,0x63,0x19,0x20,0x10,0x07 = vpinsrb $7, (%rax), %xmm12, %xmm10
+0xc4,0x63,0x19,0x22,0xd0,0x07 = vpinsrd $7, %eax, %xmm12, %xmm10
+0xc4,0x63,0x19,0x22,0x10,0x07 = vpinsrd $7, (%rax), %xmm12, %xmm10
+0xc4,0x63,0x99,0x22,0xd0,0x07 = vpinsrq $7, %rax, %xmm12, %xmm10
+0xc4,0x63,0x99,0x22,0x10,0x07 = vpinsrq $7, (%rax), %xmm12, %xmm10
+0xc4,0x43,0x29,0x21,0xdc,0x07 = vinsertps $7, %xmm12, %xmm10, %xmm11
+0xc4,0x63,0x29,0x21,0x18,0x07 = vinsertps $7, (%rax), %xmm10, %xmm11
+0xc4,0x42,0x79,0x17,0xd4 = vptest %xmm12, %xmm10
+0xc4,0x62,0x79,0x17,0x20 = vptest (%rax), %xmm12
+0xc4,0x62,0x79,0x2a,0x20 = vmovntdqa (%rax), %xmm12
+0xc4,0x42,0x29,0x37,0xdc = vpcmpgtq %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0x37,0x28 = vpcmpgtq (%rax), %xmm10, %xmm13
+0xc4,0x43,0x79,0x62,0xd4,0x07 = vpcmpistrm $7, %xmm12, %xmm10
+0xc4,0x63,0x79,0x62,0x10,0x07 = vpcmpistrm $7, (%rax), %xmm10
+0xc4,0x43,0x79,0x60,0xd4,0x07 = vpcmpestrm $7, %xmm12, %xmm10
+0xc4,0x63,0x79,0x60,0x10,0x07 = vpcmpestrm $7, (%rax), %xmm10
+0xc4,0x43,0x79,0x63,0xd4,0x07 = vpcmpistri $7, %xmm12, %xmm10
+0xc4,0x63,0x79,0x63,0x10,0x07 = vpcmpistri $7, (%rax), %xmm10
+0xc4,0x43,0x79,0x61,0xd4,0x07 = vpcmpestri $7, %xmm12, %xmm10
+0xc4,0x63,0x79,0x61,0x10,0x07 = vpcmpestri $7, (%rax), %xmm10
+0xc4,0x42,0x79,0xdb,0xd4 = vaesimc %xmm12, %xmm10
+0xc4,0x62,0x79,0xdb,0x20 = vaesimc (%rax), %xmm12
+0xc4,0x42,0x29,0xdc,0xdc = vaesenc %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xdc,0x28 = vaesenc (%rax), %xmm10, %xmm13
+0xc4,0x42,0x29,0xdd,0xdc = vaesenclast %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xdd,0x28 = vaesenclast (%rax), %xmm10, %xmm13
+0xc4,0x42,0x29,0xde,0xdc = vaesdec %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xde,0x28 = vaesdec (%rax), %xmm10, %xmm13
+0xc4,0x42,0x29,0xdf,0xdc = vaesdeclast %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xdf,0x28 = vaesdeclast (%rax), %xmm10, %xmm13
+0xc4,0x43,0x79,0xdf,0xd4,0x07 = vaeskeygenassist $7, %xmm12, %xmm10
+0xc4,0x63,0x79,0xdf,0x10,0x07 = vaeskeygenassist $7, (%rax), %xmm10
+0xc4,0x41,0x18,0xc2,0xeb,0x08 = vcmpeq_uqps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x09 = vcmpngeps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x0a = vcmpngtps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x0b = vcmpfalseps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x0c = vcmpneq_oqps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x0d = vcmpgeps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x0e = vcmpgtps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x0f = vcmptrueps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x10 = vcmpeq_osps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x11 = vcmplt_oqps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x12 = vcmple_oqps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x13 = vcmpunord_sps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x14 = vcmpneq_usps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x15 = vcmpnlt_uqps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x16 = vcmpnle_uqps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x17 = vcmpord_sps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x18 = vcmpeq_usps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x19 = vcmpnge_uqps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x1a = vcmpngt_uqps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x1b = vcmpfalse_osps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x1c = vcmpneq_osps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x1d = vcmpge_oqps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x1e = vcmpgt_oqps %xmm11, %xmm12, %xmm13
+0xc4,0x41,0x18,0xc2,0xeb,0x1f = vcmptrue_usps %xmm11, %xmm12, %xmm13
+0xc5,0x7c,0x28,0x20 = vmovaps (%rax), %ymm12
+0xc4,0x41,0x7c,0x28,0xe3 = vmovaps %ymm11, %ymm12
+0xc5,0x7c,0x29,0x18 = vmovaps %ymm11, (%rax)
+0xc5,0x7d,0x28,0x20 = vmovapd (%rax), %ymm12
+0xc4,0x41,0x7d,0x28,0xe3 = vmovapd %ymm11, %ymm12
+0xc5,0x7d,0x29,0x18 = vmovapd %ymm11, (%rax)
+0xc5,0x7c,0x10,0x20 = vmovups (%rax), %ymm12
+0xc4,0x41,0x7c,0x10,0xe3 = vmovups %ymm11, %ymm12
+0xc5,0x7c,0x11,0x18 = vmovups %ymm11, (%rax)
+0xc5,0x7d,0x10,0x20 = vmovupd (%rax), %ymm12
+0xc4,0x41,0x7d,0x10,0xe3 = vmovupd %ymm11, %ymm12
+0xc5,0x7d,0x11,0x18 = vmovupd %ymm11, (%rax)
+0xc4,0xc1,0x1c,0x15,0xe3 = vunpckhps %ymm11, %ymm12, %ymm4
+0xc4,0xc1,0x1d,0x15,0xe3 = vunpckhpd %ymm11, %ymm12, %ymm4
+0xc4,0xc1,0x1c,0x14,0xe3 = vunpcklps %ymm11, %ymm12, %ymm4
+0xc4,0xc1,0x1d,0x14,0xe3 = vunpcklpd %ymm11, %ymm12, %ymm4
+0xc5,0x1c,0x15,0x54,0xcb,0xfc = vunpckhps -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc5,0x1d,0x15,0x54,0xcb,0xfc = vunpckhpd -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc5,0x1c,0x14,0x54,0xcb,0xfc = vunpcklps -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc5,0x1d,0x14,0x54,0xcb,0xfc = vunpcklpd -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc5,0x7d,0xe7,0x18 = vmovntdq %ymm11, (%rax)
+0xc5,0x7d,0x2b,0x18 = vmovntpd %ymm11, (%rax)
+0xc5,0x7c,0x2b,0x18 = vmovntps %ymm11, (%rax)
+0xc4,0xc1,0x78,0x50,0xc4 = vmovmskps %xmm12, %eax
+0xc4,0xc1,0x79,0x50,0xc4 = vmovmskpd %xmm12, %eax
+0xc4,0xc1,0x5c,0x5f,0xf4 = vmaxps %ymm12, %ymm4, %ymm6
+0xc4,0xc1,0x5d,0x5f,0xf4 = vmaxpd %ymm12, %ymm4, %ymm6
+0xc4,0xc1,0x5c,0x5d,0xf4 = vminps %ymm12, %ymm4, %ymm6
+0xc4,0xc1,0x5d,0x5d,0xf4 = vminpd %ymm12, %ymm4, %ymm6
+0xc4,0xc1,0x5c,0x5c,0xf4 = vsubps %ymm12, %ymm4, %ymm6
+0xc4,0xc1,0x5d,0x5c,0xf4 = vsubpd %ymm12, %ymm4, %ymm6
+0xc4,0xc1,0x5c,0x5e,0xf4 = vdivps %ymm12, %ymm4, %ymm6
+0xc4,0xc1,0x5d,0x5e,0xf4 = vdivpd %ymm12, %ymm4, %ymm6
+0xc4,0xc1,0x5c,0x58,0xf4 = vaddps %ymm12, %ymm4, %ymm6
+0xc4,0xc1,0x5d,0x58,0xf4 = vaddpd %ymm12, %ymm4, %ymm6
+0xc4,0xc1,0x5c,0x59,0xf4 = vmulps %ymm12, %ymm4, %ymm6
+0xc4,0xc1,0x5d,0x59,0xf4 = vmulpd %ymm12, %ymm4, %ymm6
+0xc5,0xdc,0x5f,0x30 = vmaxps (%rax), %ymm4, %ymm6
+0xc5,0xdd,0x5f,0x30 = vmaxpd (%rax), %ymm4, %ymm6
+0xc5,0xdc,0x5d,0x30 = vminps (%rax), %ymm4, %ymm6
+0xc5,0xdd,0x5d,0x30 = vminpd (%rax), %ymm4, %ymm6
+0xc5,0xdc,0x5c,0x30 = vsubps (%rax), %ymm4, %ymm6
+0xc5,0xdd,0x5c,0x30 = vsubpd (%rax), %ymm4, %ymm6
+0xc5,0xdc,0x5e,0x30 = vdivps (%rax), %ymm4, %ymm6
+0xc5,0xdd,0x5e,0x30 = vdivpd (%rax), %ymm4, %ymm6
+0xc5,0xdc,0x58,0x30 = vaddps (%rax), %ymm4, %ymm6
+0xc5,0xdd,0x58,0x30 = vaddpd (%rax), %ymm4, %ymm6
+0xc5,0xdc,0x59,0x30 = vmulps (%rax), %ymm4, %ymm6
+0xc5,0xdd,0x59,0x30 = vmulpd (%rax), %ymm4, %ymm6
+0xc4,0x41,0x7d,0x51,0xe3 = vsqrtpd %ymm11, %ymm12
+0xc5,0x7d,0x51,0x20 = vsqrtpd (%rax), %ymm12
+0xc4,0x41,0x7c,0x51,0xe3 = vsqrtps %ymm11, %ymm12
+0xc5,0x7c,0x51,0x20 = vsqrtps (%rax), %ymm12
+0xc4,0x41,0x7c,0x52,0xe3 = vrsqrtps %ymm11, %ymm12
+0xc5,0x7c,0x52,0x20 = vrsqrtps (%rax), %ymm12
+0xc4,0x41,0x7c,0x53,0xe3 = vrcpps %ymm11, %ymm12
+0xc5,0x7c,0x53,0x20 = vrcpps (%rax), %ymm12
+0xc4,0x41,0x0c,0x54,0xdc = vandps %ymm12, %ymm14, %ymm11
+0xc4,0x41,0x0d,0x54,0xdc = vandpd %ymm12, %ymm14, %ymm11
+0xc5,0x1c,0x54,0x54,0xcb,0xfc = vandps -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc5,0x1d,0x54,0x54,0xcb,0xfc = vandpd -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc4,0x41,0x0c,0x56,0xdc = vorps %ymm12, %ymm14, %ymm11
+0xc4,0x41,0x0d,0x56,0xdc = vorpd %ymm12, %ymm14, %ymm11
+0xc5,0x1c,0x56,0x54,0xcb,0xfc = vorps -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc5,0x1d,0x56,0x54,0xcb,0xfc = vorpd -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc4,0x41,0x0c,0x57,0xdc = vxorps %ymm12, %ymm14, %ymm11
+0xc4,0x41,0x0d,0x57,0xdc = vxorpd %ymm12, %ymm14, %ymm11
+0xc5,0x1c,0x57,0x54,0xcb,0xfc = vxorps -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc5,0x1d,0x57,0x54,0xcb,0xfc = vxorpd -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc4,0x41,0x0c,0x55,0xdc = vandnps %ymm12, %ymm14, %ymm11
+0xc4,0x41,0x0d,0x55,0xdc = vandnpd %ymm12, %ymm14, %ymm11
+0xc5,0x1c,0x55,0x54,0xcb,0xfc = vandnps -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc5,0x1d,0x55,0x54,0xcb,0xfc = vandnpd -4(%rbx, %rcx, 8), %ymm12, %ymm10
+0xc4,0x41,0x7c,0x5a,0xe5 = vcvtps2pd %xmm13, %ymm12
+0xc5,0x7c,0x5a,0x20 = vcvtps2pd (%rax), %ymm12
+0xc4,0x41,0x7e,0xe6,0xe5 = vcvtdq2pd %xmm13, %ymm12
+0xc5,0x7e,0xe6,0x20 = vcvtdq2pd (%rax), %ymm12
+0xc4,0x41,0x7c,0x5b,0xd4 = vcvtdq2ps %ymm12, %ymm10
+0xc5,0x7c,0x5b,0x20 = vcvtdq2ps (%rax), %ymm12
+0xc4,0x41,0x7d,0x5b,0xd4 = vcvtps2dq %ymm12, %ymm10
+0xc5,0x7d,0x5b,0x10 = vcvtps2dq (%rax), %ymm10
+0xc4,0x41,0x7e,0x5b,0xd4 = vcvttps2dq %ymm12, %ymm10
+0xc5,0x7e,0x5b,0x10 = vcvttps2dq (%rax), %ymm10
+0xc4,0x41,0x79,0xe6,0xd3 = vcvttpd2dq %xmm11, %xmm10
+0xc4,0x41,0x7d,0xe6,0xd4 = vcvttpd2dq %ymm12, %xmm10
+0xc4,0x41,0x79,0xe6,0xd3 = vcvttpd2dq %xmm11, %xmm10
+0xc5,0x79,0xe6,0x18 = vcvttpd2dqx (%rax), %xmm11
+0xc4,0x41,0x7d,0xe6,0xdc = vcvttpd2dq %ymm12, %xmm11
+0xc5,0x7d,0xe6,0x18 = vcvttpd2dqy (%rax), %xmm11
+0xc4,0x41,0x7d,0x5a,0xd4 = vcvtpd2ps %ymm12, %xmm10
+0xc4,0x41,0x79,0x5a,0xd3 = vcvtpd2ps %xmm11, %xmm10
+0xc5,0x79,0x5a,0x18 = vcvtpd2psx (%rax), %xmm11
+0xc4,0x41,0x7d,0x5a,0xdc = vcvtpd2ps %ymm12, %xmm11
+0xc5,0x7d,0x5a,0x18 = vcvtpd2psy (%rax), %xmm11
+0xc4,0x41,0x7f,0xe6,0xd4 = vcvtpd2dq %ymm12, %xmm10
+0xc4,0x41,0x7f,0xe6,0xdc = vcvtpd2dq %ymm12, %xmm11
+0xc5,0x7f,0xe6,0x18 = vcvtpd2dqy (%rax), %xmm11
+0xc4,0x41,0x7b,0xe6,0xd3 = vcvtpd2dq %xmm11, %xmm10
+0xc5,0x7b,0xe6,0x18 = vcvtpd2dqx (%rax), %xmm11
+0xc4,0x41,0x1c,0xc2,0xeb,0x00 = vcmpeqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x02 = vcmpleps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x01 = vcmpltps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x04 = vcmpneqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x06 = vcmpnleps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x05 = vcmpnltps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x07 = vcmpordps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x03 = vcmpunordps %ymm11, %ymm12, %ymm13
+0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqps -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x02 = vcmpleps -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltps -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqps -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnleps -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltps -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x4c,0xc2,0x64,0xcb,0xfc,0x07 = vcmpordps -4(%rbx, %rcx, 8), %ymm6, %ymm12
+0xc5,0x1c,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordps -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc4,0x41,0x1d,0xc2,0xeb,0x00 = vcmpeqpd %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1d,0xc2,0xeb,0x02 = vcmplepd %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1d,0xc2,0xeb,0x01 = vcmpltpd %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1d,0xc2,0xeb,0x04 = vcmpneqpd %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1d,0xc2,0xeb,0x06 = vcmpnlepd %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1d,0xc2,0xeb,0x05 = vcmpnltpd %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1d,0xc2,0xeb,0x07 = vcmpordpd %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1d,0xc2,0xeb,0x03 = vcmpunordpd %ymm11, %ymm12, %ymm13
+0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x00 = vcmpeqpd -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x02 = vcmplepd -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x01 = vcmpltpd -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x04 = vcmpneqpd -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x06 = vcmpnlepd -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x05 = vcmpnltpd -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc5,0x4d,0xc2,0x64,0xcb,0xfc,0x07 = vcmpordpd -4(%rbx, %rcx, 8), %ymm6, %ymm12
+0xc5,0x1d,0xc2,0x6c,0xcb,0xfc,0x03 = vcmpunordpd -4(%rbx, %rcx, 8), %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x08 = vcmpeq_uqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x09 = vcmpngeps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x0a = vcmpngtps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x0b = vcmpfalseps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x0c = vcmpneq_oqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x0d = vcmpgeps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x0e = vcmpgtps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x0f = vcmptrueps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x10 = vcmpeq_osps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x11 = vcmplt_oqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x12 = vcmple_oqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x13 = vcmpunord_sps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x14 = vcmpneq_usps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x15 = vcmpnlt_uqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x16 = vcmpnle_uqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x17 = vcmpord_sps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x18 = vcmpeq_usps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x19 = vcmpnge_uqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x1a = vcmpngt_uqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x1b = vcmpfalse_osps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x1c = vcmpneq_osps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x1d = vcmpge_oqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x1e = vcmpgt_oqps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1c,0xc2,0xeb,0x1f = vcmptrue_usps %ymm11, %ymm12, %ymm13
+0xc4,0x41,0x1f,0xd0,0xeb = vaddsubps %ymm11, %ymm12, %ymm13
+0xc5,0x27,0xd0,0x20 = vaddsubps (%rax), %ymm11, %ymm12
+0xc4,0x41,0x1d,0xd0,0xeb = vaddsubpd %ymm11, %ymm12, %ymm13
+0xc5,0x25,0xd0,0x20 = vaddsubpd (%rax), %ymm11, %ymm12
+0xc4,0x41,0x1f,0x7c,0xeb = vhaddps %ymm11, %ymm12, %ymm13
+0xc5,0x1f,0x7c,0x28 = vhaddps (%rax), %ymm12, %ymm13
+0xc4,0x41,0x1d,0x7c,0xeb = vhaddpd %ymm11, %ymm12, %ymm13
+0xc5,0x1d,0x7c,0x28 = vhaddpd (%rax), %ymm12, %ymm13
+0xc4,0x41,0x1f,0x7d,0xeb = vhsubps %ymm11, %ymm12, %ymm13
+0xc5,0x1f,0x7d,0x28 = vhsubps (%rax), %ymm12, %ymm13
+0xc4,0x41,0x1d,0x7d,0xeb = vhsubpd %ymm11, %ymm12, %ymm13
+0xc5,0x1d,0x7d,0x28 = vhsubpd (%rax), %ymm12, %ymm13
+0xc4,0x43,0x2d,0x0c,0xdc,0x03 = vblendps $3, %ymm12, %ymm10, %ymm11
+0xc4,0x63,0x2d,0x0c,0x18,0x03 = vblendps $3, (%rax), %ymm10, %ymm11
+0xc4,0x43,0x2d,0x0d,0xdc,0x03 = vblendpd $3, %ymm12, %ymm10, %ymm11
+0xc4,0x63,0x2d,0x0d,0x18,0x03 = vblendpd $3, (%rax), %ymm10, %ymm11
+0xc4,0x43,0x2d,0x40,0xdc,0x03 = vdpps $3, %ymm12, %ymm10, %ymm11
+0xc4,0x63,0x2d,0x40,0x18,0x03 = vdpps $3, (%rax), %ymm10, %ymm11
+0xc4,0x62,0x7d,0x1a,0x20 = vbroadcastf128 (%rax), %ymm12
+0xc4,0x62,0x7d,0x19,0x20 = vbroadcastsd (%rax), %ymm12
+0xc4,0x62,0x79,0x18,0x20 = vbroadcastss (%rax), %xmm12
+0xc4,0x62,0x7d,0x18,0x20 = vbroadcastss (%rax), %ymm12
+0xc4,0x43,0x1d,0x18,0xd4,0x07 = vinsertf128 $7, %xmm12, %ymm12, %ymm10
+0xc4,0x63,0x1d,0x18,0x10,0x07 = vinsertf128 $7, (%rax), %ymm12, %ymm10
+0xc4,0x43,0x7d,0x19,0xe4,0x07 = vextractf128 $7, %ymm12, %xmm12
+0xc4,0x63,0x7d,0x19,0x20,0x07 = vextractf128 $7, %ymm12, (%rax)
+0xc4,0x62,0x29,0x2f,0x20 = vmaskmovpd %xmm12, %xmm10, (%rax)
+0xc4,0x62,0x2d,0x2f,0x20 = vmaskmovpd %ymm12, %ymm10, (%rax)
+0xc4,0x62,0x19,0x2d,0x10 = vmaskmovpd (%rax), %xmm12, %xmm10
+0xc4,0x62,0x1d,0x2d,0x10 = vmaskmovpd (%rax), %ymm12, %ymm10
+0xc4,0x62,0x29,0x2e,0x20 = vmaskmovps %xmm12, %xmm10, (%rax)
+0xc4,0x62,0x2d,0x2e,0x20 = vmaskmovps %ymm12, %ymm10, (%rax)
+0xc4,0x62,0x19,0x2c,0x10 = vmaskmovps (%rax), %xmm12, %xmm10
+0xc4,0x62,0x1d,0x2c,0x10 = vmaskmovps (%rax), %ymm12, %ymm10
+0xc4,0x43,0x79,0x04,0xd3,0x07 = vpermilps $7, %xmm11, %xmm10
+0xc4,0x43,0x7d,0x04,0xda,0x07 = vpermilps $7, %ymm10, %ymm11
+0xc4,0x63,0x79,0x04,0x10,0x07 = vpermilps $7, (%rax), %xmm10
+0xc4,0x63,0x7d,0x04,0x10,0x07 = vpermilps $7, (%rax), %ymm10
+0xc4,0x42,0x29,0x0c,0xdb = vpermilps %xmm11, %xmm10, %xmm11
+0xc4,0x42,0x2d,0x0c,0xdb = vpermilps %ymm11, %ymm10, %ymm11
+0xc4,0x62,0x29,0x0c,0x28 = vpermilps (%rax), %xmm10, %xmm13
+0xc4,0x62,0x2d,0x0c,0x18 = vpermilps (%rax), %ymm10, %ymm11
+0xc4,0x43,0x79,0x05,0xd3,0x07 = vpermilpd $7, %xmm11, %xmm10
+0xc4,0x43,0x7d,0x05,0xda,0x07 = vpermilpd $7, %ymm10, %ymm11
+0xc4,0x63,0x79,0x05,0x10,0x07 = vpermilpd $7, (%rax), %xmm10
+0xc4,0x63,0x7d,0x05,0x10,0x07 = vpermilpd $7, (%rax), %ymm10
+0xc4,0x42,0x29,0x0d,0xdb = vpermilpd %xmm11, %xmm10, %xmm11
+0xc4,0x42,0x2d,0x0d,0xdb = vpermilpd %ymm11, %ymm10, %ymm11
+0xc4,0x62,0x29,0x0d,0x28 = vpermilpd (%rax), %xmm10, %xmm13
+0xc4,0x62,0x2d,0x0d,0x18 = vpermilpd (%rax), %ymm10, %ymm11
+0xc4,0x43,0x2d,0x06,0xdc,0x07 = vperm2f128 $7, %ymm12, %ymm10, %ymm11
+0xc4,0x63,0x2d,0x06,0x18,0x07 = vperm2f128 $7, (%rax), %ymm10, %ymm11
+0xc4,0x41,0x7b,0x2d,0xc0 = vcvtsd2si %xmm8, %r8d
+0xc5,0xfb,0x2d,0x09 = vcvtsd2si (%rcx), %ecx
+0xc4,0xe1,0xfa,0x2d,0xcc = vcvtss2si %xmm4, %rcx
+0xc4,0x61,0xfa,0x2d,0x01 = vcvtss2si (%rcx), %r8
+0xc4,0x41,0x3b,0x2a,0xf8 = vcvtsi2sdl %r8d, %xmm8, %xmm15
+0xc5,0x3b,0x2a,0x7d,0x00 = vcvtsi2sdl (%rbp), %xmm8, %xmm15
+0xc4,0xe1,0xdb,0x2a,0xf1 = vcvtsi2sdq %rcx, %xmm4, %xmm6
+0xc4,0xe1,0xdb,0x2a,0x31 = vcvtsi2sdq (%rcx), %xmm4, %xmm6
+0xc4,0xe1,0xda,0x2a,0xf1 = vcvtsi2ssq %rcx, %xmm4, %xmm6
+0xc4,0xe1,0xda,0x2a,0x31 = vcvtsi2ssq (%rcx), %xmm4, %xmm6
+0xc4,0xe1,0xfb,0x2c,0xcc = vcvttsd2si %xmm4, %rcx
+0xc4,0xe1,0xfb,0x2c,0x09 = vcvttsd2si (%rcx), %rcx
+0xc4,0xe1,0xfa,0x2c,0xcc = vcvttss2si %xmm4, %rcx
+0xc4,0xe1,0xfa,0x2c,0x09 = vcvttss2si (%rcx), %rcx
+0xc5,0x7f,0xf0,0x20 = vlddqu (%rax), %ymm12
+0xc4,0x41,0x7f,0x12,0xd4 = vmovddup %ymm12, %ymm10
+0xc5,0x7f,0x12,0x20 = vmovddup (%rax), %ymm12
+0xc4,0x41,0x7d,0x6f,0xd4 = vmovdqa %ymm12, %ymm10
+0xc5,0x7d,0x7f,0x20 = vmovdqa %ymm12, (%rax)
+0xc5,0x7d,0x6f,0x20 = vmovdqa (%rax), %ymm12
+0xc4,0x41,0x7e,0x6f,0xd4 = vmovdqu %ymm12, %ymm10
+0xc5,0x7e,0x7f,0x20 = vmovdqu %ymm12, (%rax)
+0xc5,0x7e,0x6f,0x20 = vmovdqu (%rax), %ymm12
+0xc4,0x41,0x7e,0x16,0xd4 = vmovshdup %ymm12, %ymm10
+0xc5,0x7e,0x16,0x20 = vmovshdup (%rax), %ymm12
+0xc4,0x41,0x7e,0x12,0xd4 = vmovsldup %ymm12, %ymm10
+0xc5,0x7e,0x12,0x20 = vmovsldup (%rax), %ymm12
+0xc4,0x42,0x7d,0x17,0xd4 = vptest %ymm12, %ymm10
+0xc4,0x62,0x7d,0x17,0x20 = vptest (%rax), %ymm12
+0xc4,0x43,0x7d,0x09,0xda,0x07 = vroundpd $7, %ymm10, %ymm11
+0xc4,0x63,0x7d,0x09,0x10,0x07 = vroundpd $7, (%rax), %ymm10
+0xc4,0x43,0x7d,0x08,0xda,0x07 = vroundps $7, %ymm10, %ymm11
+0xc4,0x63,0x7d,0x08,0x10,0x07 = vroundps $7, (%rax), %ymm10
+0xc4,0x41,0x2d,0xc6,0xdc,0x07 = vshufpd $7, %ymm12, %ymm10, %ymm11
+0xc5,0x2d,0xc6,0x18,0x07 = vshufpd $7, (%rax), %ymm10, %ymm11
+0xc4,0x41,0x2c,0xc6,0xdc,0x07 = vshufps $7, %ymm12, %ymm10, %ymm11
+0xc5,0x2c,0xc6,0x18,0x07 = vshufps $7, (%rax), %ymm10, %ymm11
+0xc4,0x42,0x79,0x0f,0xd4 = vtestpd %xmm12, %xmm10
+0xc4,0x42,0x7d,0x0f,0xd4 = vtestpd %ymm12, %ymm10
+0xc4,0x62,0x79,0x0f,0x20 = vtestpd (%rax), %xmm12
+0xc4,0x62,0x7d,0x0f,0x20 = vtestpd (%rax), %ymm12
+0xc4,0x42,0x79,0x0e,0xd4 = vtestps %xmm12, %xmm10
+0xc4,0x42,0x7d,0x0e,0xd4 = vtestps %ymm12, %ymm10
+0xc4,0x62,0x79,0x0e,0x20 = vtestps (%rax), %xmm12
+0xc4,0x62,0x7d,0x0e,0x20 = vtestps (%rax), %ymm12
+0xc4,0x43,0x79,0x17,0xc0,0x0a = vextractps $10, %xmm8, %r8d
+0xc4,0xe3,0x79,0x17,0xe1,0x07 = vextractps $7, %xmm4, %ecx
+0xc4,0xe1,0xf9,0x7e,0xe1 = vmovq %xmm4, %rcx
+0xc5,0xf9,0x50,0xcc = vmovmskpd %xmm4, %ecx
+0xc5,0xfd,0x50,0xcc = vmovmskpd %ymm4, %ecx
+0xc5,0xf8,0x50,0xcc = vmovmskps %xmm4, %ecx
+0xc5,0xfc,0x50,0xcc = vmovmskps %ymm4, %ecx
+0xc4,0xe3,0x79,0x14,0xe1,0x07 = vpextrb $7, %xmm4, %ecx
+0xc4,0x41,0x01,0xc4,0xc0,0x07 = vpinsrw $7, %r8d, %xmm15, %xmm8
+0xc5,0xd9,0xc4,0xf1,0x07 = vpinsrw $7, %ecx, %xmm4, %xmm6
+0xc5,0xf9,0xd7,0xcc = vpmovmskb %xmm4, %ecx
+0xc4,0x63,0x1d,0x4b,0xac,0x20,0xad,0xde,0x00,0x00,0xb0 = vblendvpd %ymm11, 0xdead(%rax, %riz), %ymm12, %ymm13
+0xc4,0x81,0x78,0x29,0x1c,0x1e = vmovaps %xmm3, (%r14, %r11)
+0xc4,0x81,0x78,0x28,0x1c,0x1e = vmovaps (%r14, %r11), %xmm3
+0xc4,0xc1,0x78,0x29,0x1c,0x1e = vmovaps %xmm3, (%r14, %rbx)
+0xc4,0xc1,0x78,0x28,0x1c,0x1e = vmovaps (%r14, %rbx), %xmm3
+0xc4,0xa1,0x78,0x29,0x1c,0x18 = vmovaps %xmm3, (%rax, %r11)
+0xc4,0xe2,0xf9,0x92,0x14,0x4f = vgatherdpd %xmm0, (%rdi, %xmm1, 2), %xmm2
+0xc4,0xe2,0xf9,0x93,0x14,0x4f = vgatherqpd %xmm0, (%rdi, %xmm1, 2), %xmm2
+0xc4,0xe2,0xfd,0x92,0x14,0x4f = vgatherdpd %ymm0, (%rdi, %xmm1, 2), %ymm2
+0xc4,0xe2,0xfd,0x93,0x14,0x4f = vgatherqpd %ymm0, (%rdi, %ymm1, 2), %ymm2
+0xc4,0x02,0x39,0x92,0x14,0x4f = vgatherdps %xmm8, (%r15, %xmm9, 2), %xmm10
+0xc4,0x02,0x39,0x93,0x14,0x4f = vgatherqps %xmm8, (%r15, %xmm9, 2), %xmm10
+0xc4,0x02,0x3d,0x92,0x14,0x4f = vgatherdps %ymm8, (%r15, %ymm9, 2), %ymm10
+0xc4,0x02,0x3d,0x93,0x14,0x4f = vgatherqps %xmm8, (%r15, %ymm9, 2), %xmm10
+0xc4,0xe2,0xf9,0x90,0x14,0x4f = vpgatherdq %xmm0, (%rdi, %xmm1, 2), %xmm2
+0xc4,0xe2,0xf9,0x91,0x14,0x4f = vpgatherqq %xmm0, (%rdi, %xmm1, 2), %xmm2
+0xc4,0xe2,0xfd,0x90,0x14,0x4f = vpgatherdq %ymm0, (%rdi, %xmm1, 2), %ymm2
+0xc4,0xe2,0xfd,0x91,0x14,0x4f = vpgatherqq %ymm0, (%rdi, %ymm1, 2), %ymm2
+0xc4,0x02,0x39,0x90,0x14,0x4f = vpgatherdd %xmm8, (%r15, %xmm9, 2), %xmm10
+0xc4,0x02,0x39,0x91,0x14,0x4f = vpgatherqd %xmm8, (%r15, %xmm9, 2), %xmm10
+0xc4,0x02,0x3d,0x90,0x14,0x4f = vpgatherdd %ymm8, (%r15, %ymm9, 2), %ymm10
+0xc4,0x02,0x3d,0x91,0x14,0x4f = vpgatherqd %xmm8, (%r15, %ymm9, 2), %xmm10
+0xc5,0x78,0x28,0xc0 = vmovaps %xmm0, %xmm8
+0xc5,0x78,0x29,0xc0 = vmovaps %xmm8, %xmm0
+0xc5,0x7c,0x28,0xc0 = vmovaps %ymm0, %ymm8
+0xc5,0x7c,0x29,0xc0 = vmovaps %ymm8, %ymm0
+0xc5,0x78,0x10,0xc0 = vmovups %xmm0, %xmm8
+0xc5,0x78,0x11,0xc0 = vmovups %xmm8, %xmm0
+0xc5,0x7c,0x10,0xc0 = vmovups %ymm0, %ymm8
+0xc5,0x7c,0x11,0xc0 = vmovups %ymm8, %ymm0
+0xc5,0x7a,0x10,0xc0 = vmovss %xmm0, %xmm0, %xmm8
+0xc5,0xba,0x10,0xc0 = vmovss %xmm0, %xmm8, %xmm0
+0xc5,0x7a,0x11,0xc0 = vmovss %xmm8, %xmm0, %xmm0
+0xc5,0x7b,0x10,0xc0 = vmovsd %xmm0, %xmm0, %xmm8
+0xc5,0xbb,0x10,0xc0 = vmovsd %xmm0, %xmm8, %xmm0
+0xc5,0x7b,0x11,0xc0 = vmovsd %xmm8, %xmm0, %xmm0
diff --git a/capstone/suite/MC/X86/x86_64-bmi-encoding.s.cs b/capstone/suite/MC/X86/x86_64-bmi-encoding.s.cs
new file mode 100644
index 000000000..7151d2cf0
--- /dev/null
+++ b/capstone/suite/MC/X86/x86_64-bmi-encoding.s.cs
@@ -0,0 +1,51 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
+0xc4,0xc2,0x28,0xf3,0xd3 = blsmskl %r11d, %r10d
+0xc4,0xc2,0xa8,0xf3,0xd3 = blsmskq %r11, %r10
+0xc4,0xe2,0x28,0xf3,0x10 = blsmskl (%rax), %r10d
+0xc4,0xe2,0xa8,0xf3,0x10 = blsmskq (%rax), %r10
+0xc4,0xc2,0x28,0xf3,0xdb = blsil %r11d, %r10d
+0xc4,0xc2,0xa8,0xf3,0xdb = blsiq %r11, %r10
+0xc4,0xe2,0x28,0xf3,0x18 = blsil (%rax), %r10d
+0xc4,0xe2,0xa8,0xf3,0x18 = blsiq (%rax), %r10
+0xc4,0xc2,0x28,0xf3,0xcb = blsrl %r11d, %r10d
+0xc4,0xc2,0xa8,0xf3,0xcb = blsrq %r11, %r10
+0xc4,0xe2,0x28,0xf3,0x08 = blsrl (%rax), %r10d
+0xc4,0xe2,0xa8,0xf3,0x08 = blsrq (%rax), %r10
+0xc4,0x62,0x20,0xf2,0x10 = andnl (%rax), %r11d, %r10d
+0xc4,0x62,0xa0,0xf2,0x10 = andnq (%rax), %r11, %r10
+0xc4,0x62,0x18,0xf7,0x10 = bextrl %r12d, (%rax), %r10d
+0xc4,0x42,0x18,0xf7,0xd3 = bextrl %r12d, %r11d, %r10d
+0xc4,0x62,0x98,0xf7,0x10 = bextrq %r12, (%rax), %r10
+0xc4,0x42,0x98,0xf7,0xd3 = bextrq %r12, %r11, %r10
+0xc4,0x62,0x18,0xf5,0x10 = bzhil %r12d, (%rax), %r10d
+0xc4,0x42,0x18,0xf5,0xd3 = bzhil %r12d, %r11d, %r10d
+0xc4,0x62,0x98,0xf5,0x10 = bzhiq %r12, (%rax), %r10
+0xc4,0x42,0x98,0xf5,0xd3 = bzhiq %r12, %r11, %r10
+0xc4,0x42,0x22,0xf5,0xd4 = pextl %r12d, %r11d, %r10d
+0xc4,0x62,0x22,0xf5,0x10 = pextl (%rax), %r11d, %r10d
+0xc4,0x42,0xa2,0xf5,0xd4 = pextq %r12, %r11, %r10
+0xc4,0x62,0xa2,0xf5,0x10 = pextq (%rax), %r11, %r10
+0xc4,0x42,0x23,0xf5,0xd4 = pdepl %r12d, %r11d, %r10d
+0xc4,0x62,0x23,0xf5,0x10 = pdepl (%rax), %r11d, %r10d
+0xc4,0x42,0xa3,0xf5,0xd4 = pdepq %r12, %r11, %r10
+0xc4,0x62,0xa3,0xf5,0x10 = pdepq (%rax), %r11, %r10
+0xc4,0x42,0x23,0xf6,0xd4 = mulxl %r12d, %r11d, %r10d
+0xc4,0x62,0x23,0xf6,0x10 = mulxl (%rax), %r11d, %r10d
+0xc4,0x42,0xa3,0xf6,0xd4 = mulxq %r12, %r11, %r10
+0xc4,0x62,0xa3,0xf6,0x10 = mulxq (%rax), %r11, %r10
+0xc4,0x43,0x7b,0xf0,0xd4,0x0a = rorxl $10, %r12d, %r10d
+0xc4,0x63,0x7b,0xf0,0x10,0x1f = rorxl $31, (%rax), %r10d
+0xc4,0x43,0xfb,0xf0,0xd4,0x01 = rorxq $1, %r12, %r10
+0xc4,0x63,0xfb,0xf0,0x10,0x3f = rorxq $63, (%rax), %r10
+0xc4,0x62,0x19,0xf7,0x10 = shlxl %r12d, (%rax), %r10d
+0xc4,0x42,0x19,0xf7,0xd3 = shlxl %r12d, %r11d, %r10d
+0xc4,0x62,0x99,0xf7,0x10 = shlxq %r12, (%rax), %r10
+0xc4,0x42,0x99,0xf7,0xd3 = shlxq %r12, %r11, %r10
+0xc4,0x62,0x1a,0xf7,0x10 = sarxl %r12d, (%rax), %r10d
+0xc4,0x42,0x1a,0xf7,0xd3 = sarxl %r12d, %r11d, %r10d
+0xc4,0x62,0x9a,0xf7,0x10 = sarxq %r12, (%rax), %r10
+0xc4,0x42,0x9a,0xf7,0xd3 = sarxq %r12, %r11, %r10
+0xc4,0x62,0x1b,0xf7,0x10 = shrxl %r12d, (%rax), %r10d
+0xc4,0x42,0x1b,0xf7,0xd3 = shrxl %r12d, %r11d, %r10d
+0xc4,0x62,0x9b,0xf7,0x10 = shrxq %r12, (%rax), %r10
+0xc4,0x42,0x9b,0xf7,0xd3 = shrxq %r12, %r11, %r10
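Each line in these .s.cs files pairs a comma-separated instruction encoding with the AT&T-syntax text Capstone is expected to produce for it. The suite's own test harness is not part of this diff; as a hedged illustration only, a single vector from the BMI file above can be replayed with Capstone's Python bindings roughly like this (the ad hoc parsing is mine, not the real runner's):

# Illustrative only: decode one test vector with Capstone's Python bindings.
from capstone import Cs, CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT

line = "0xc4,0xc2,0x28,0xf3,0xd3 = blsmskl %r11d, %r10d"
encoding, expected = (part.strip() for part in line.split(" = ", 1))
code = bytes(int(byte, 16) for byte in encoding.split(","))

md = Cs(CS_ARCH_X86, CS_MODE_64)
md.syntax = CS_OPT_SYNTAX_ATT      # the file header requests AT&T syntax
insn = next(md.disasm(code, 0x0))  # disassemble at a dummy address
print(insn.mnemonic, insn.op_str)  # expected: blsmskl %r11d, %r10d

Whether the printed text matches the expected string byte-for-byte depends on the Capstone build; these files pin the exact strings a conforming build should emit.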
diff --git a/capstone/suite/MC/X86/x86_64-encoding.s.cs b/capstone/suite/MC/X86/x86_64-encoding.s.cs
new file mode 100644
index 000000000..a8e04f8a6
--- /dev/null
+++ b/capstone/suite/MC/X86/x86_64-encoding.s.cs
@@ -0,0 +1,59 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
+0x65,0x48,0x8b,0x07 = movq %gs:(%rdi), %rax
+0xf2,0x0f,0x38,0xf0,0xc3 = crc32b %bl, %eax
+0xf2,0x0f,0x38,0xf0,0x43,0x04 = crc32b 4(%rbx), %eax
+0x66,0xf2,0x0f,0x38,0xf1,0xc3 = crc32w %bx, %eax
+0x66,0xf2,0x0f,0x38,0xf1,0x43,0x04 = crc32w 4(%rbx), %eax
+0xf2,0x0f,0x38,0xf1,0xc3 = crc32l %ebx, %eax
+0xf2,0x0f,0x38,0xf1,0x43,0x04 = crc32l 4(%rbx), %eax
+0xf2,0x0f,0x38,0xf1,0x8c,0xcb,0xef,0xbe,0xad,0xde = crc32l -0x21524111(%rbx, %rcx, 8), %ecx
+0xf2,0x0f,0x38,0xf1,0x0c,0x25,0x45,0x00,0x00,0x00 = crc32l 0x45, %ecx
+0xf2,0x0f,0x38,0xf1,0x0c,0x25,0xed,0x7e,0x00,0x00 = crc32l 0x7eed, %ecx
+0xf2,0x0f,0x38,0xf1,0x0c,0x25,0xfe,0xca,0xbe,0xba = crc32l 0xffffffffbabecafe, %ecx
+0xf2,0x0f,0x38,0xf1,0xc9 = crc32l %ecx, %ecx
+0xf2,0x41,0x0f,0x38,0xf0,0xc3 = crc32b %r11b, %eax
+0xf2,0x0f,0x38,0xf0,0x43,0x04 = crc32b 4(%rbx), %eax
+0xf2,0x48,0x0f,0x38,0xf0,0xc7 = crc32b %dil, %rax
+0xf2,0x49,0x0f,0x38,0xf0,0xc3 = crc32b %r11b, %rax
+0xf2,0x48,0x0f,0x38,0xf0,0x43,0x04 = crc32b 4(%rbx), %rax
+0xf2,0x48,0x0f,0x38,0xf1,0xc3 = crc32q %rbx, %rax
+0xf2,0x48,0x0f,0x38,0xf1,0x43,0x04 = crc32q 4(%rbx), %rax
+0x49,0x0f,0x6e,0xc8 = movq %r8, %mm1
+0x41,0x0f,0x6e,0xc8 = movd %r8d, %mm1
+0x48,0x0f,0x6e,0xca = movq %rdx, %mm1
+0x0f,0x6e,0xca = movd %edx, %mm1
+0x49,0x0f,0x7e,0xc8 = movq %mm1, %r8
+0x41,0x0f,0x7e,0xc8 = movd %mm1, %r8d
+0x48,0x0f,0x7e,0xca = movq %mm1, %rdx
+0x0f,0x7e,0xca = movd %mm1, %edx
+0x0f,0x3a,0xcc,0xd1,0x01 = sha1rnds4 $1, %xmm1, %xmm2
+0x0f,0x3a,0xcc,0x10,0x01 = sha1rnds4 $1, (%rax), %xmm2
+0x0f,0x38,0xc8,0xd1 = sha1nexte %xmm1, %xmm2
+0x0f,0x38,0xc9,0xd1 = sha1msg1 %xmm1, %xmm2
+0x0f,0x38,0xc9,0x10 = sha1msg1 (%rax), %xmm2
+0x0f,0x38,0xca,0xd1 = sha1msg2 %xmm1, %xmm2
+0x0f,0x38,0xca,0x10 = sha1msg2 (%rax), %xmm2
+0x0f,0x38,0xcb,0x10 = sha256rnds2 %xmm0, (%rax), %xmm2
+0x0f,0x38,0xcb,0xd1 = sha256rnds2 %xmm0, %xmm1, %xmm2
+0x0f,0x38,0xcb,0x10 = sha256rnds2 %xmm0, (%rax), %xmm2
+0x0f,0x38,0xcb,0xd1 = sha256rnds2 %xmm0, %xmm1, %xmm2
+0x0f,0x38,0xcc,0xd1 = sha256msg1 %xmm1, %xmm2
+0x0f,0x38,0xcc,0x10 = sha256msg1 (%rax), %xmm2
+0x0f,0x38,0xcd,0xd1 = sha256msg2 %xmm1, %xmm2
+0x0f,0x38,0xcd,0x10 = sha256msg2 (%rax), %xmm2
+0x48,0x8b,0x1c,0x25,0xad,0xde,0x00,0x00 = movq 0xdead, %rbx
+0x48,0x8b,0x04,0x25,0xef,0xbe,0x00,0x00 = movq 0xbeef, %rax
+0x48,0x8b,0x04,0xe5,0xfc,0xff,0xff,0xff = movq -4(, %riz, 8), %rax
+0x48,0x8b,0x04,0x21 = movq (%rcx, %riz), %rax
+0x48,0x8b,0x04,0xe1 = movq (%rcx, %riz, 8), %rax
+0x48,0x0f,0xae,0x00 = fxsave64 (%rax)
+0x48,0x0f,0xae,0x08 = fxrstor64 (%rax)
+0xc9 = leave
+0xc9 = leave
+0x67,0xd9,0x07 = flds (%edi)
+0x67,0xdf,0x07 = filds (%edi)
+0xd9,0x07 = flds (%rdi)
+0xdf,0x07 = filds (%rdi)
+0x66,0x0f,0xd7,0xcd = pmovmskb %xmm5, %ecx
+0x66,0x0f,0xc4,0xe9,0x03 = pinsrw $3, %ecx, %xmm5
+0x66,0x0f,0xc4,0xe9,0x03 = pinsrw $3, %ecx, %xmm5
diff --git a/capstone/suite/MC/X86/x86_64-fma3-encoding.s.cs b/capstone/suite/MC/X86/x86_64-fma3-encoding.s.cs
new file mode 100644
index 000000000..389e36800
--- /dev/null
+++ b/capstone/suite/MC/X86/x86_64-fma3-encoding.s.cs
@@ -0,0 +1,169 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
+0xc4,0x42,0xa9,0x98,0xdc = vfmadd132pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0x98,0x18 = vfmadd132pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0x98,0xdc = vfmadd132ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0x98,0x18 = vfmadd132ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xa8,0xdc = vfmadd213pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xa8,0x18 = vfmadd213pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xa8,0xdc = vfmadd213ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xa8,0x18 = vfmadd213ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xb8,0xdc = vfmadd231pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xb8,0x18 = vfmadd231pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xb8,0xdc = vfmadd231ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xb8,0x18 = vfmadd231ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xad,0x98,0xdc = vfmadd132pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0x98,0x18 = vfmadd132pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0x98,0xdc = vfmadd132ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0x98,0x18 = vfmadd132ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xa8,0xdc = vfmadd213pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xa8,0x18 = vfmadd213pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xa8,0xdc = vfmadd213ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xa8,0x18 = vfmadd213ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xb8,0xdc = vfmadd231pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xb8,0x18 = vfmadd231pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xb8,0xdc = vfmadd231ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xb8,0x18 = vfmadd231ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xa9,0x98,0xdc = vfmadd132pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0x98,0x18 = vfmadd132pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0x98,0xdc = vfmadd132ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0x98,0x18 = vfmadd132ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xa8,0xdc = vfmadd213pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xa8,0x18 = vfmadd213pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xa8,0xdc = vfmadd213ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xa8,0x18 = vfmadd213ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xb8,0xdc = vfmadd231pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xb8,0x18 = vfmadd231pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xb8,0xdc = vfmadd231ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xb8,0x18 = vfmadd231ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0x96,0xdc = vfmaddsub132pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0x96,0x18 = vfmaddsub132pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0x96,0xdc = vfmaddsub132ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0x96,0x18 = vfmaddsub132ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xa6,0xdc = vfmaddsub213pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xa6,0x18 = vfmaddsub213pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xa6,0xdc = vfmaddsub213ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xa6,0x18 = vfmaddsub213ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xb6,0xdc = vfmaddsub231pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xb6,0x18 = vfmaddsub231pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xb6,0xdc = vfmaddsub231ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xb6,0x18 = vfmaddsub231ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0x97,0xdc = vfmsubadd132pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0x97,0x18 = vfmsubadd132pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0x97,0xdc = vfmsubadd132ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0x97,0x18 = vfmsubadd132ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xa7,0xdc = vfmsubadd213pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xa7,0x18 = vfmsubadd213pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xa7,0xdc = vfmsubadd213ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xa7,0x18 = vfmsubadd213ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xb7,0xdc = vfmsubadd231pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xb7,0x18 = vfmsubadd231pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xb7,0xdc = vfmsubadd231ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xb7,0x18 = vfmsubadd231ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0x9a,0xdc = vfmsub132pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0x9a,0x18 = vfmsub132pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0x9a,0xdc = vfmsub132ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0x9a,0x18 = vfmsub132ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xaa,0xdc = vfmsub213pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xaa,0x18 = vfmsub213pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xaa,0xdc = vfmsub213ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xaa,0x18 = vfmsub213ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xba,0xdc = vfmsub231pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xba,0x18 = vfmsub231pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xba,0xdc = vfmsub231ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xba,0x18 = vfmsub231ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0x9c,0xdc = vfnmadd132pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0x9c,0x18 = vfnmadd132pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0x9c,0xdc = vfnmadd132ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0x9c,0x18 = vfnmadd132ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xac,0xdc = vfnmadd213pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xac,0x18 = vfnmadd213pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xac,0xdc = vfnmadd213ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xac,0x18 = vfnmadd213ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xbc,0xdc = vfnmadd231pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xbc,0x18 = vfnmadd231pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xbc,0xdc = vfnmadd231ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xbc,0x18 = vfnmadd231ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0x9e,0xdc = vfnmsub132pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0x9e,0x18 = vfnmsub132pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0x9e,0xdc = vfnmsub132ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0x9e,0x18 = vfnmsub132ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xae,0xdc = vfnmsub213pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xae,0x18 = vfnmsub213pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xae,0xdc = vfnmsub213ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xae,0x18 = vfnmsub213ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xa9,0xbe,0xdc = vfnmsub231pd %xmm12, %xmm10, %xmm11
+0xc4,0x62,0xa9,0xbe,0x18 = vfnmsub231pd (%rax), %xmm10, %xmm11
+0xc4,0x42,0x29,0xbe,0xdc = vfnmsub231ps %xmm12, %xmm10, %xmm11
+0xc4,0x62,0x29,0xbe,0x18 = vfnmsub231ps (%rax), %xmm10, %xmm11
+0xc4,0x42,0xad,0x98,0xdc = vfmadd132pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0x98,0x18 = vfmadd132pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0x98,0xdc = vfmadd132ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0x98,0x18 = vfmadd132ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xa8,0xdc = vfmadd213pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xa8,0x18 = vfmadd213pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xa8,0xdc = vfmadd213ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xa8,0x18 = vfmadd213ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xb8,0xdc = vfmadd231pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xb8,0x18 = vfmadd231pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xb8,0xdc = vfmadd231ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xb8,0x18 = vfmadd231ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0x96,0xdc = vfmaddsub132pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0x96,0x18 = vfmaddsub132pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0x96,0xdc = vfmaddsub132ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0x96,0x18 = vfmaddsub132ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xa6,0xdc = vfmaddsub213pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xa6,0x18 = vfmaddsub213pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xa6,0xdc = vfmaddsub213ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xa6,0x18 = vfmaddsub213ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xb6,0xdc = vfmaddsub231pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xb6,0x18 = vfmaddsub231pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xb6,0xdc = vfmaddsub231ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xb6,0x18 = vfmaddsub231ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0x97,0xdc = vfmsubadd132pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0x97,0x18 = vfmsubadd132pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0x97,0xdc = vfmsubadd132ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0x97,0x18 = vfmsubadd132ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xa7,0xdc = vfmsubadd213pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xa7,0x18 = vfmsubadd213pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xa7,0xdc = vfmsubadd213ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xa7,0x18 = vfmsubadd213ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xb7,0xdc = vfmsubadd231pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xb7,0x18 = vfmsubadd231pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xb7,0xdc = vfmsubadd231ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xb7,0x18 = vfmsubadd231ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0x9a,0xdc = vfmsub132pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0x9a,0x18 = vfmsub132pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0x9a,0xdc = vfmsub132ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0x9a,0x18 = vfmsub132ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xaa,0xdc = vfmsub213pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xaa,0x18 = vfmsub213pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xaa,0xdc = vfmsub213ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xaa,0x18 = vfmsub213ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xba,0xdc = vfmsub231pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xba,0x18 = vfmsub231pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xba,0xdc = vfmsub231ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xba,0x18 = vfmsub231ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0x9c,0xdc = vfnmadd132pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0x9c,0x18 = vfnmadd132pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0x9c,0xdc = vfnmadd132ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0x9c,0x18 = vfnmadd132ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xac,0xdc = vfnmadd213pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xac,0x18 = vfnmadd213pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xac,0xdc = vfnmadd213ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xac,0x18 = vfnmadd213ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xbc,0xdc = vfnmadd231pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xbc,0x18 = vfnmadd231pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xbc,0xdc = vfnmadd231ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xbc,0x18 = vfnmadd231ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0x9e,0xdc = vfnmsub132pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0x9e,0x18 = vfnmsub132pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0x9e,0xdc = vfnmsub132ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0x9e,0x18 = vfnmsub132ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xae,0xdc = vfnmsub213pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xae,0x18 = vfnmsub213pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xae,0xdc = vfnmsub213ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xae,0x18 = vfnmsub213ps (%rax), %ymm10, %ymm11
+0xc4,0x42,0xad,0xbe,0xdc = vfnmsub231pd %ymm12, %ymm10, %ymm11
+0xc4,0x62,0xad,0xbe,0x18 = vfnmsub231pd (%rax), %ymm10, %ymm11
+0xc4,0x42,0x2d,0xbe,0xdc = vfnmsub231ps %ymm12, %ymm10, %ymm11
+0xc4,0x62,0x2d,0xbe,0x18 = vfnmsub231ps (%rax), %ymm10, %ymm11
diff --git a/capstone/suite/MC/X86/x86_64-fma4-encoding.s.cs b/capstone/suite/MC/X86/x86_64-fma4-encoding.s.cs
new file mode 100644
index 000000000..c3eef0e6f
--- /dev/null
+++ b/capstone/suite/MC/X86/x86_64-fma4-encoding.s.cs
@@ -0,0 +1,98 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
+0xc4,0xe3,0xf9,0x6a,0x01,0x10 = vfmaddss (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x6a,0x01,0x10 = vfmaddss %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x6a,0xc2,0x10 = vfmaddss %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x6b,0x01,0x10 = vfmaddsd (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x6b,0x01,0x10 = vfmaddsd %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x6b,0xc2,0x10 = vfmaddsd %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xc3,0xf9,0x6b,0xc2,0x10 = vfmaddsd %xmm10, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x68,0x01,0x10 = vfmaddps (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x68,0x01,0x10 = vfmaddps %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x68,0xc2,0x10 = vfmaddps %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x69,0x01,0x10 = vfmaddpd (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x69,0x01,0x10 = vfmaddpd %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x69,0xc2,0x10 = vfmaddpd %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xfd,0x68,0x01,0x10 = vfmaddps (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x68,0x01,0x10 = vfmaddps %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x68,0xc2,0x10 = vfmaddps %ymm2, %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x69,0x01,0x10 = vfmaddpd (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x69,0x01,0x10 = vfmaddpd %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x69,0xc2,0x10 = vfmaddpd %ymm2, %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0xf9,0x6e,0x01,0x10 = vfmsubss (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x6e,0x01,0x10 = vfmsubss %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x6e,0xc2,0x10 = vfmsubss %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x6f,0x01,0x10 = vfmsubsd (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x6f,0x01,0x10 = vfmsubsd %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x6f,0xc2,0x10 = vfmsubsd %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x6c,0x01,0x10 = vfmsubps (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x6c,0x01,0x10 = vfmsubps %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x6c,0xc2,0x10 = vfmsubps %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x6d,0x01,0x10 = vfmsubpd (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x6d,0x01,0x10 = vfmsubpd %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x6d,0xc2,0x10 = vfmsubpd %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xfd,0x6c,0x01,0x10 = vfmsubps (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x6c,0x01,0x10 = vfmsubps %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x6c,0xc2,0x10 = vfmsubps %ymm2, %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x6d,0x01,0x10 = vfmsubpd (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x6d,0x01,0x10 = vfmsubpd %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x6d,0xc2,0x10 = vfmsubpd %ymm2, %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0xf9,0x7a,0x01,0x10 = vfnmaddss (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x7a,0x01,0x10 = vfnmaddss %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x7a,0xc2,0x10 = vfnmaddss %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x7b,0x01,0x10 = vfnmaddsd (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x7b,0x01,0x10 = vfnmaddsd %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x7b,0xc2,0x10 = vfnmaddsd %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x78,0x01,0x10 = vfnmaddps (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x78,0x01,0x10 = vfnmaddps %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x78,0xc2,0x10 = vfnmaddps %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x79,0x01,0x10 = vfnmaddpd (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x79,0x01,0x10 = vfnmaddpd %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x79,0xc2,0x10 = vfnmaddpd %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xfd,0x78,0x01,0x10 = vfnmaddps (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x78,0x01,0x10 = vfnmaddps %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x78,0xc2,0x10 = vfnmaddps %ymm2, %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x79,0x01,0x10 = vfnmaddpd (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x79,0x01,0x10 = vfnmaddpd %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x79,0xc2,0x10 = vfnmaddpd %ymm2, %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0xf9,0x7e,0x01,0x10 = vfnmsubss (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x7e,0x01,0x10 = vfnmsubss %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x7e,0xc2,0x10 = vfnmsubss %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x7f,0x01,0x10 = vfnmsubsd (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x7f,0x01,0x10 = vfnmsubsd %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x7f,0xc2,0x10 = vfnmsubsd %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x7c,0x01,0x10 = vfnmsubps (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x7c,0x01,0x10 = vfnmsubps %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x7c,0xc2,0x10 = vfnmsubps %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x7d,0x01,0x10 = vfnmsubpd (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x7d,0x01,0x10 = vfnmsubpd %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x7d,0xc2,0x10 = vfnmsubpd %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xfd,0x7c,0x01,0x10 = vfnmsubps (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x7c,0x01,0x10 = vfnmsubps %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x7c,0xc2,0x10 = vfnmsubps %ymm2, %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x7d,0x01,0x10 = vfnmsubpd (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x7d,0x01,0x10 = vfnmsubpd %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x7d,0xc2,0x10 = vfnmsubpd %ymm2, %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0xf9,0x5c,0x01,0x10 = vfmaddsubps (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x5c,0x01,0x10 = vfmaddsubps %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x5c,0xc2,0x10 = vfmaddsubps %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x5d,0x01,0x10 = vfmaddsubpd (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x5d,0x01,0x10 = vfmaddsubpd %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x5d,0xc2,0x10 = vfmaddsubpd %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xfd,0x5c,0x01,0x10 = vfmaddsubps (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x5c,0x01,0x10 = vfmaddsubps %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x5c,0xc2,0x10 = vfmaddsubps %ymm2, %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x5d,0x01,0x10 = vfmaddsubpd (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x5d,0x01,0x10 = vfmaddsubpd %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x5d,0xc2,0x10 = vfmaddsubpd %ymm2, %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0xf9,0x5e,0x01,0x10 = vfmsubaddps (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x5e,0x01,0x10 = vfmsubaddps %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x5e,0xc2,0x10 = vfmsubaddps %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x5f,0x01,0x10 = vfmsubaddpd (%rcx), %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0x79,0x5f,0x01,0x10 = vfmsubaddpd %xmm1, (%rcx), %xmm0, %xmm0
+0xc4,0xe3,0xf9,0x5f,0xc2,0x10 = vfmsubaddpd %xmm2, %xmm1, %xmm0, %xmm0
+0xc4,0xe3,0xfd,0x5e,0x01,0x10 = vfmsubaddps (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x5e,0x01,0x10 = vfmsubaddps %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x5e,0xc2,0x10 = vfmsubaddps %ymm2, %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x5f,0x01,0x10 = vfmsubaddpd (%rcx), %ymm1, %ymm0, %ymm0
+0xc4,0xe3,0x7d,0x5f,0x01,0x10 = vfmsubaddpd %ymm1, (%rcx), %ymm0, %ymm0
+0xc4,0xe3,0xfd,0x5f,0xc2,0x10 = vfmsubaddpd %ymm2, %ymm1, %ymm0, %ymm0
diff --git a/capstone/suite/MC/X86/x86_64-imm-widths.s.cs b/capstone/suite/MC/X86/x86_64-imm-widths.s.cs
new file mode 100644
index 000000000..4f2df32f6
--- /dev/null
+++ b/capstone/suite/MC/X86/x86_64-imm-widths.s.cs
@@ -0,0 +1,27 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
+0x04,0x00 = addb $0x00, %al
+0x04,0x7f = addb $0x7F, %al
+0x04,0x80 = addb $0x80, %al
+0x04,0xff = addb $0xFF, %al
+0x66,0x83,0xc0,0x00 = addw $0x0000, %ax
+0x66,0x83,0xc0,0x7f = addw $0x007F, %ax
+0x66,0x83,0xc0,0x80 = addw $-0x80, %ax
+0x66,0x83,0xc0,0xff = addw $-1, %ax
+0x83,0xc0,0x00 = addl $0x00000000, %eax
+0x83,0xc0,0x7f = addl $0x0000007F, %eax
+0x05,0x80,0xff,0x00,0x00 = addl $0xFF80, %eax
+0x05,0xff,0xff,0x00,0x00 = addl $0xFFFF, %eax
+0x83,0xc0,0x80 = addl $-0x80, %eax
+0x83,0xc0,0xff = addl $-1, %eax
+0x48,0x83,0xc0,0x00 = addq $0x0000000000000000, %rax
+0x48,0x83,0xc0,0x7f = addq $0x000000000000007F, %rax
+0x48,0x83,0xc0,0x80 = addq $0xFFFFFFFFFFFFFF80, %rax
+0x48,0x83,0xc0,0xff = addq $0xFFFFFFFFFFFFFFFF, %rax
+0x48,0x83,0xc0,0x00 = addq $0x0000000000000000, %rax
+0x48,0x05,0x80,0xff,0x00,0x00 = addq $0xFF80, %rax
+0x48,0x05,0xff,0xff,0x00,0x00 = addq $0xFFFF, %rax
+0x48,0xb8,0x80,0xff,0xff,0xff,0x00,0x00,0x00,0x00 = movabsq $0xFFFFFF80, %rax
+0x48,0xb8,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00 = movabsq $0xFFFFFFFF, %rax
+0x48,0x05,0xff,0xff,0xff,0x7f = addq $0x000000007FFFFFFF, %rax
+0x48,0x05,0x00,0x00,0x00,0x80 = addq $0xFFFFFFFF80000000, %rax
+0x48,0x05,0x00,0xff,0xff,0xff = addq $0xFFFFFFFFFFFFFF00, %rax
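The imm-widths file above exercises how sign-extended immediates are rendered: 0x48,0x83,0xc0,0x80 is the imm8 form of add, whose single immediate byte 0x80 the CPU sign-extends to the 64-bit operand width, which is why the expected text is addq $0xFFFFFFFFFFFFFF80, %rax. A small sketch of that arithmetic; the sign_extend helper is hypothetical, written only for this note:

# Hypothetical helper illustrating imm8 sign extension to 64 bits.
def sign_extend(value: int, bits: int) -> int:
    sign_bit = 1 << (bits - 1)
    return (value ^ sign_bit) - sign_bit

imm8 = 0x80                      # immediate byte of 0x48,0x83,0xc0,0x80
extended = sign_extend(imm8, 8) & 0xFFFFFFFFFFFFFFFF
print(hex(extended))             # 0xffffffffffffff80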
diff --git a/capstone/suite/MC/X86/x86_64-rand-encoding.s.cs b/capstone/suite/MC/X86/x86_64-rand-encoding.s.cs
new file mode 100644
index 000000000..23b1b2238
--- /dev/null
+++ b/capstone/suite/MC/X86/x86_64-rand-encoding.s.cs
@@ -0,0 +1,13 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
+0x66,0x0f,0xc7,0xf0 = rdrandw %ax
+0x0f,0xc7,0xf0 = rdrandl %eax
+0x48,0x0f,0xc7,0xf0 = rdrandq %rax
+0x66,0x41,0x0f,0xc7,0xf3 = rdrandw %r11w
+0x41,0x0f,0xc7,0xf3 = rdrandl %r11d
+0x49,0x0f,0xc7,0xf3 = rdrandq %r11
+0x66,0x0f,0xc7,0xf8 = rdseedw %ax
+0x0f,0xc7,0xf8 = rdseedl %eax
+0x48,0x0f,0xc7,0xf8 = rdseedq %rax
+0x66,0x41,0x0f,0xc7,0xfb = rdseedw %r11w
+0x41,0x0f,0xc7,0xfb = rdseedl %r11d
+0x49,0x0f,0xc7,0xfb = rdseedq %r11
diff --git a/capstone/suite/MC/X86/x86_64-rtm-encoding.s.cs b/capstone/suite/MC/X86/x86_64-rtm-encoding.s.cs
new file mode 100644
index 000000000..0695dc296
--- /dev/null
+++ b/capstone/suite/MC/X86/x86_64-rtm-encoding.s.cs
@@ -0,0 +1,4 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
+0x0f,0x01,0xd5 = xend
+0x0f,0x01,0xd6 = xtest
+0xc6,0xf8,0x0d = xabort $13
diff --git a/capstone/suite/MC/X86/x86_64-sse4a.s.cs b/capstone/suite/MC/X86/x86_64-sse4a.s.cs
new file mode 100644
index 000000000..6403f1404
--- /dev/null
+++ b/capstone/suite/MC/X86/x86_64-sse4a.s.cs
@@ -0,0 +1 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
diff --git a/capstone/suite/MC/X86/x86_64-tbm-encoding.s.cs b/capstone/suite/MC/X86/x86_64-tbm-encoding.s.cs
new file mode 100644
index 000000000..a8d6fccd2
--- /dev/null
+++ b/capstone/suite/MC/X86/x86_64-tbm-encoding.s.cs
@@ -0,0 +1,40 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
+0x8f,0xea,0x78,0x10,0xc7,0xfe,0x0a,0x00,0x00 = bextrl $2814, %edi, %eax
+0x8f,0xea,0x78,0x10,0x07,0xfe,0x0a,0x00,0x00 = bextrl $2814, (%rdi), %eax
+0x8f,0xea,0xf8,0x10,0xc7,0xfe,0x0a,0x00,0x00 = bextrq $2814, %rdi, %rax
+0x8f,0xea,0xf8,0x10,0x07,0xfe,0x0a,0x00,0x00 = bextrq $2814, (%rdi), %rax
+0x8f,0xe9,0x78,0x01,0xcf = blcfilll %edi, %eax
+0x8f,0xe9,0x78,0x01,0x0f = blcfilll (%rdi), %eax
+0x8f,0xe9,0xf8,0x01,0xcf = blcfillq %rdi, %rax
+0x8f,0xe9,0xf8,0x01,0x0f = blcfillq (%rdi), %rax
+0x8f,0xe9,0x78,0x02,0xf7 = blcil %edi, %eax
+0x8f,0xe9,0x78,0x02,0x37 = blcil (%rdi), %eax
+0x8f,0xe9,0xf8,0x02,0xf7 = blciq %rdi, %rax
+0x8f,0xe9,0xf8,0x02,0x37 = blciq (%rdi), %rax
+0x8f,0xe9,0x78,0x01,0xef = blcicl %edi, %eax
+0x8f,0xe9,0x78,0x01,0x2f = blcicl (%rdi), %eax
+0x8f,0xe9,0xf8,0x01,0xef = blcicq %rdi, %rax
+0x8f,0xe9,0xf8,0x01,0x2f = blcicq (%rdi), %rax
+0x8f,0xe9,0x78,0x02,0xcf = blcmskl %edi, %eax
+0x8f,0xe9,0x78,0x02,0x0f = blcmskl (%rdi), %eax
+0x8f,0xe9,0xf8,0x02,0xcf = blcmskq %rdi, %rax
+0x8f,0xe9,0xf8,0x02,0x0f = blcmskq (%rdi), %rax
+0x8f,0xe9,0x78,0x01,0xdf = blcsl %edi, %eax
+0x8f,0xe9,0x78,0x01,0x1f = blcsl (%rdi), %eax
+0x8f,0xe9,0xf8,0x01,0xdf = blcsq %rdi, %rax
+0x8f,0xe9,0xf8,0x01,0x1f = blcsq (%rdi), %rax
+0x8f,0xe9,0x78,0x01,0xd7 = blsfilll %edi, %eax
+0x8f,0xe9,0x78,0x01,0x17 = blsfilll (%rdi), %eax
+0x8f,0xe9,0xf8,0x01,0xd7 = blsfillq %rdi, %rax
+0x8f,0xe9,0xf8,0x01,0x17 = blsfillq (%rdi), %rax
+0x8f,0xe9,0x78,0x01,0xf7 = blsicl %edi, %eax
+0x8f,0xe9,0x78,0x01,0x37 = blsicl (%rdi), %eax
+0x8f,0xe9,0xf8,0x01,0xf7 = blsicq %rdi, %rax
+0x8f,0xe9,0x78,0x01,0xff = t1mskcl %edi, %eax
+0x8f,0xe9,0x78,0x01,0x3f = t1mskcl (%rdi), %eax
+0x8f,0xe9,0xf8,0x01,0xff = t1mskcq %rdi, %rax
+0x8f,0xe9,0xf8,0x01,0x3f = t1mskcq (%rdi), %rax
+0x8f,0xe9,0x78,0x01,0xe7 = tzmskl %edi, %eax
+0x8f,0xe9,0x78,0x01,0x27 = tzmskl (%rdi), %eax
+0x8f,0xe9,0xf8,0x01,0xe7 = tzmskq %rdi, %rax
+0x8f,0xe9,0xf8,0x01,0x27 = tzmskq (%rdi), %rax
diff --git a/capstone/suite/MC/X86/x86_64-xop-encoding.s.cs b/capstone/suite/MC/X86/x86_64-xop-encoding.s.cs
new file mode 100644
index 000000000..4adebcbcc
--- /dev/null
+++ b/capstone/suite/MC/X86/x86_64-xop-encoding.s.cs
@@ -0,0 +1,152 @@
+# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT
+0x8f,0xe9,0x78,0xe2,0x0c,0x01 = vphsubwd (%rcx, %rax), %xmm1
+0x8f,0xe9,0x78,0xe2,0xc8 = vphsubwd %xmm0, %xmm1
+0x8f,0xe9,0x78,0xe3,0x0c,0x01 = vphsubdq (%rcx, %rax), %xmm1
+0x8f,0xe9,0x78,0xe3,0xc8 = vphsubdq %xmm0, %xmm1
+0x8f,0xe9,0x78,0xe1,0x08 = vphsubbw (%rax), %xmm1
+0x8f,0xe9,0x78,0xe1,0xca = vphsubbw %xmm2, %xmm1
+0x8f,0xe9,0x78,0xc7,0x21 = vphaddwq (%rcx), %xmm4
+0x8f,0xe9,0x78,0xc7,0xd6 = vphaddwq %xmm6, %xmm2
+0x8f,0xe9,0x78,0xc6,0x3c,0x02 = vphaddwd (%rdx, %rax), %xmm7
+0x8f,0xe9,0x78,0xc6,0xe3 = vphaddwd %xmm3, %xmm4
+0x8f,0xe9,0x78,0xd7,0x34,0x01 = vphadduwq (%rcx, %rax), %xmm6
+0x8f,0xe9,0x78,0xd7,0xc7 = vphadduwq %xmm7, %xmm0
+0x8f,0xe9,0x78,0xd6,0x28 = vphadduwd (%rax), %xmm5
+0x8f,0xe9,0x78,0xd6,0xca = vphadduwd %xmm2, %xmm1
+0x8f,0xe9,0x78,0xdb,0x64,0x01,0x08 = vphaddudq 8(%rcx, %rax), %xmm4
+0x8f,0xe9,0x78,0xdb,0xd6 = vphaddudq %xmm6, %xmm2
+0x8f,0xe9,0x78,0xd1,0x19 = vphaddubw (%rcx), %xmm3
+0x8f,0xe9,0x78,0xd1,0xc5 = vphaddubw %xmm5, %xmm0
+0x8f,0xe9,0x78,0xd3,0x21 = vphaddubq (%rcx), %xmm4
+0x8f,0xe9,0x78,0xd3,0xd2 = vphaddubq %xmm2, %xmm2
+0x8f,0xe9,0x78,0xd2,0x28 = vphaddubd (%rax), %xmm5
+0x8f,0xe9,0x78,0xd2,0xfd = vphaddubd %xmm5, %xmm7
+0x8f,0xe9,0x78,0xcb,0x22 = vphadddq (%rdx), %xmm4
+0x8f,0xe9,0x78,0xcb,0xec = vphadddq %xmm4, %xmm5
+0x8f,0xe9,0x78,0xc1,0x0c,0x01 = vphaddbw (%rcx, %rax), %xmm1
+0x8f,0xe9,0x78,0xc1,0xf5 = vphaddbw %xmm5, %xmm6
+0x8f,0xe9,0x78,0xc3,0x0c,0x01 = vphaddbq (%rcx, %rax), %xmm1
+0x8f,0xe9,0x78,0xc3,0xc2 = vphaddbq %xmm2, %xmm0
+0x8f,0xe9,0x78,0xc2,0x0c,0x01 = vphaddbd (%rcx, %rax), %xmm1
+0x8f,0xe9,0x78,0xc2,0xd9 = vphaddbd %xmm1, %xmm3
+0x8f,0xe9,0x78,0x82,0x0c,0x01 = vfrczss (%rcx, %rax), %xmm1
+0x8f,0xe9,0x78,0x82,0xfd = vfrczss %xmm5, %xmm7
+0x8f,0xe9,0x78,0x83,0x0c,0x01 = vfrczsd (%rcx, %rax), %xmm1
+0x8f,0xe9,0x78,0x83,0xc7 = vfrczsd %xmm7, %xmm0
+0x8f,0xe9,0x78,0x80,0x58,0x04 = vfrczps 4(%rax), %xmm3
+0x8f,0xe9,0x78,0x80,0xee = vfrczps %xmm6, %xmm5
+0x8f,0xe9,0x78,0x80,0x09 = vfrczps (%rcx), %xmm1
+0x8f,0xe9,0x7c,0x80,0xe2 = vfrczps %ymm2, %ymm4
+0x8f,0xe9,0x78,0x81,0x0c,0x01 = vfrczpd (%rcx, %rax), %xmm1
+0x8f,0xe9,0x78,0x81,0xc7 = vfrczpd %xmm7, %xmm0
+0x8f,0xe9,0x7c,0x81,0x14,0x01 = vfrczpd (%rcx, %rax), %ymm2
+0x8f,0xe9,0x7c,0x81,0xdd = vfrczpd %ymm5, %ymm3
+0x8f,0xe9,0x78,0x95,0xd1 = vpshlw %xmm0, %xmm1, %xmm2
+0x8f,0xe9,0xf0,0x95,0x10 = vpshlw (%rax), %xmm1, %xmm2
+0x8f,0xe9,0x78,0x95,0x14,0x08 = vpshlw %xmm0, (%rax, %rcx), %xmm2
+0x8f,0xe9,0x68,0x97,0xf4 = vpshlq %xmm2, %xmm4, %xmm6
+0x8f,0xe9,0xe8,0x97,0x09 = vpshlq (%rcx), %xmm2, %xmm1
+0x8f,0xe9,0x50,0x97,0x34,0x0a = vpshlq %xmm5, (%rdx, %rcx), %xmm6
+0x8f,0xe9,0x40,0x96,0xdd = vpshld %xmm7, %xmm5, %xmm3
+0x8f,0xe9,0xe0,0x96,0x58,0x04 = vpshld 4(%rax), %xmm3, %xmm3
+0x8f,0xe9,0x70,0x96,0x2c,0x08 = vpshld %xmm1, (%rax, %rcx), %xmm5
+0x8f,0xe9,0x70,0x94,0xda = vpshlb %xmm1, %xmm2, %xmm3
+0x8f,0xe9,0xf8,0x94,0x39 = vpshlb (%rcx), %xmm0, %xmm7
+0x8f,0xe9,0x68,0x94,0x1c,0x10 = vpshlb %xmm2, (%rax, %rdx), %xmm3
+0x8f,0xe9,0x40,0x99,0xdd = vpshaw %xmm7, %xmm5, %xmm3
+0x8f,0xe9,0xe8,0x99,0x08 = vpshaw (%rax), %xmm2, %xmm1
+0x8f,0xe9,0x78,0x99,0x5c,0x08,0x08 = vpshaw %xmm0, 8(%rax, %rcx), %xmm3
+0x8f,0xe9,0x58,0x9b,0xe4 = vpshaq %xmm4, %xmm4, %xmm4
+0x8f,0xe9,0xe8,0x9b,0x01 = vpshaq (%rcx), %xmm2, %xmm0
+0x8f,0xe9,0x48,0x9b,0x2c,0x08 = vpshaq %xmm6, (%rax, %rcx), %xmm5
+0x8f,0xe9,0x50,0x9a,0xc4 = vpshad %xmm5, %xmm4, %xmm0
+0x8f,0xe9,0xe8,0x9a,0x28 = vpshad (%rax), %xmm2, %xmm5
+0x8f,0xe9,0x68,0x9a,0x28 = vpshad %xmm2, (%rax), %xmm5
+0x8f,0xe9,0x70,0x98,0xc1 = vpshab %xmm1, %xmm1, %xmm0
+0x8f,0xe9,0xd8,0x98,0x01 = vpshab (%rcx), %xmm4, %xmm0
+0x8f,0xe9,0x50,0x98,0x19 = vpshab %xmm5, (%rcx), %xmm3
+0x8f,0xe9,0xe0,0x91,0x30 = vprotw (%rax), %xmm3, %xmm6
+0x8f,0xe9,0x50,0x91,0x0c,0x08 = vprotw %xmm5, (%rax, %rcx), %xmm1
+0x8f,0xe9,0x78,0x91,0xd1 = vprotw %xmm0, %xmm1, %xmm2
+0x8f,0xe8,0x78,0xc1,0x09,0x2a = vprotw $42, (%rcx), %xmm1
+0x8f,0xe8,0x78,0xc1,0x20,0x29 = vprotw $41, (%rax), %xmm4
+0x8f,0xe8,0x78,0xc1,0xd9,0x28 = vprotw $40, %xmm1, %xmm3
+0x8f,0xe9,0xf0,0x93,0x10 = vprotq (%rax), %xmm1, %xmm2
+0x8f,0xe9,0xf0,0x93,0x14,0x08 = vprotq (%rax, %rcx), %xmm1, %xmm2
+0x8f,0xe9,0x78,0x93,0xd1 = vprotq %xmm0, %xmm1, %xmm2
+0x8f,0xe8,0x78,0xc3,0x10,0x2a = vprotq $42, (%rax), %xmm2
+0x8f,0xe8,0x78,0xc3,0x14,0x08,0x2a = vprotq $42, (%rax, %rcx), %xmm2
+0x8f,0xe8,0x78,0xc3,0xd1,0x2a = vprotq $42, %xmm1, %xmm2
+0x8f,0xe9,0xf8,0x92,0x18 = vprotd (%rax), %xmm0, %xmm3
+0x8f,0xe9,0x68,0x92,0x24,0x08 = vprotd %xmm2, (%rax, %rcx), %xmm4
+0x8f,0xe9,0x50,0x92,0xd3 = vprotd %xmm5, %xmm3, %xmm2
+0x8f,0xe8,0x78,0xc2,0x31,0x2b = vprotd $43, (%rcx), %xmm6
+0x8f,0xe8,0x78,0xc2,0x3c,0x08,0x2c = vprotd $44, (%rax, %rcx), %xmm7
+0x8f,0xe8,0x78,0xc2,0xe4,0x2d = vprotd $45, %xmm4, %xmm4
+0x8f,0xe9,0xe8,0x90,0x29 = vprotb (%rcx), %xmm2, %xmm5
+0x8f,0xe9,0x50,0x90,0x24,0x08 = vprotb %xmm5, (%rax, %rcx), %xmm4
+0x8f,0xe9,0x58,0x90,0xd3 = vprotb %xmm4, %xmm3, %xmm2
+0x8f,0xe8,0x78,0xc0,0x18,0x2e = vprotb $46, (%rax), %xmm3
+0x8f,0xe8,0x78,0xc0,0x3c,0x08,0x2f = vprotb $47, (%rax, %rcx), %xmm7
+0x8f,0xe8,0x78,0xc0,0xed,0x30 = vprotb $48, %xmm5, %xmm5
+0x8f,0xe8,0x60,0xb6,0xe2,0x10 = vpmadcswd %xmm1, %xmm2, %xmm3, %xmm4
+0x8f,0xe8,0x60,0xb6,0x20,0x10 = vpmadcswd %xmm1, (%rax), %xmm3, %xmm4
+0x8f,0xe8,0x48,0xa6,0xe4,0x10 = vpmadcsswd %xmm1, %xmm4, %xmm6, %xmm4
+0x8f,0xe8,0x60,0xa6,0x24,0x08,0x10 = vpmadcsswd %xmm1, (%rax, %rcx), %xmm3, %xmm4
+0x8f,0xe8,0x50,0x95,0xe2,0x00 = vpmacsww %xmm0, %xmm2, %xmm5, %xmm4
+0x8f,0xe8,0x48,0x95,0x20,0x10 = vpmacsww %xmm1, (%rax), %xmm6, %xmm4
+0x8f,0xe8,0x48,0x96,0xfd,0x40 = vpmacswd %xmm4, %xmm5, %xmm6, %xmm7
+0x8f,0xe8,0x70,0x96,0x10,0x00 = vpmacswd %xmm0, (%rax), %xmm1, %xmm2
+0x8f,0xe8,0x68,0x85,0xcb,0x40 = vpmacssww %xmm4, %xmm3, %xmm2, %xmm1
+0x8f,0xe8,0x40,0x85,0x39,0x60 = vpmacssww %xmm6, (%rcx), %xmm7, %xmm7
+0x8f,0xe8,0x58,0x86,0xd2,0x40 = vpmacsswd %xmm4, %xmm2, %xmm4, %xmm2
+0x8f,0xe8,0x70,0x86,0x44,0x08,0x08,0x00 = vpmacsswd %xmm0, 8(%rax, %rcx), %xmm1, %xmm0
+0x8f,0xe8,0x68,0x87,0xe1,0x10 = vpmacssdql %xmm1, %xmm1, %xmm2, %xmm4
+0x8f,0xe8,0x48,0x87,0x29,0x70 = vpmacssdql %xmm7, (%rcx), %xmm6, %xmm5
+0x8f,0xe8,0x78,0x8f,0xca,0x30 = vpmacssdqh %xmm3, %xmm2, %xmm0, %xmm1
+0x8f,0xe8,0x68,0x8f,0x1c,0x08,0x70 = vpmacssdqh %xmm7, (%rax, %rcx), %xmm2, %xmm3
+0x8f,0xe8,0x60,0x8e,0xea,0x20 = vpmacssdd %xmm2, %xmm2, %xmm3, %xmm5
+0x8f,0xe8,0x70,0x8e,0x10,0x40 = vpmacssdd %xmm4, (%rax), %xmm1, %xmm2
+0x8f,0xe8,0x48,0x97,0xf8,0x30 = vpmacsdql %xmm3, %xmm0, %xmm6, %xmm7
+0x8f,0xe8,0x60,0x97,0x69,0x08,0x50 = vpmacsdql %xmm5, 8(%rcx), %xmm3, %xmm5
+0x8f,0xe8,0x60,0x9f,0xd5,0x70 = vpmacsdqh %xmm7, %xmm5, %xmm3, %xmm2
+0x8f,0xe8,0x68,0x9f,0x40,0x04,0x50 = vpmacsdqh %xmm5, 4(%rax), %xmm2, %xmm0
+0x8f,0xe8,0x58,0x9e,0xd6,0x40 = vpmacsdd %xmm4, %xmm6, %xmm4, %xmm2
+0x8f,0xe8,0x58,0x9e,0x1c,0x08,0x40 = vpmacsdd %xmm4, (%rax, %rcx), %xmm4, %xmm3
+0x8f,0xe8,0x60,0xcd,0xe2,0x2a = vpcomw $42, %xmm2, %xmm3, %xmm4
+0x8f,0xe8,0x60,0xcd,0x20,0x2a = vpcomw $42, (%rax), %xmm3, %xmm4
+0x8f,0xe8,0x60,0xed,0xe9,0x2b = vpcomuw $43, %xmm1, %xmm3, %xmm5
+0x8f,0xe8,0x78,0xed,0x34,0x08,0x2c = vpcomuw $44, (%rax, %rcx), %xmm0, %xmm6
+0x8f,0xe8,0x60,0xef,0xfb,0x2d = vpcomuq $45, %xmm3, %xmm3, %xmm7
+0x8f,0xe8,0x60,0xef,0x08,0x2e = vpcomuq $46, (%rax), %xmm3, %xmm1
+0x8f,0xe8,0x70,0xee,0xd0,0x2f = vpcomud $47, %xmm0, %xmm1, %xmm2
+0x8f,0xe8,0x48,0xee,0x58,0x04,0x30 = vpcomud $48, 4(%rax), %xmm6, %xmm3
+0x8f,0xe8,0x58,0xec,0xeb,0x31 = vpcomub $49, %xmm3, %xmm4, %xmm5
+0x8f,0xe8,0x48,0xec,0x11,0x32 = vpcomub $50, (%rcx), %xmm6, %xmm2
+0x8f,0xe8,0x78,0xcf,0xeb,0x33 = vpcomq $51, %xmm3, %xmm0, %xmm5
+0x8f,0xe8,0x70,0xcf,0x38,0x34 = vpcomq $52, (%rax), %xmm1, %xmm7
+0x8f,0xe8,0x60,0xce,0xc3,0x35 = vpcomd $53, %xmm3, %xmm3, %xmm0
+0x8f,0xe8,0x68,0xce,0x11,0x36 = vpcomd $54, (%rcx), %xmm2, %xmm2
+0x8f,0xe8,0x58,0xcc,0xd6,0x37 = vpcomb $55, %xmm6, %xmm4, %xmm2
+0x8f,0xe8,0x60,0xcc,0x50,0x08,0x38 = vpcomb $56, 8(%rax), %xmm3, %xmm2
+0x8f,0xe8,0x60,0xa3,0xe2,0x10 = vpperm %xmm1, %xmm2, %xmm3, %xmm4
+0x8f,0xe8,0xe0,0xa3,0x20,0x20 = vpperm (%rax), %xmm2, %xmm3, %xmm4
+0x8f,0xe8,0x60,0xa3,0x20,0x10 = vpperm %xmm1, (%rax), %xmm3, %xmm4
+0x8f,0xe8,0x60,0xa2,0xe2,0x10 = vpcmov %xmm1, %xmm2, %xmm3, %xmm4
+0x8f,0xe8,0xe0,0xa2,0x20,0x20 = vpcmov (%rax), %xmm2, %xmm3, %xmm4
+0x8f,0xe8,0x60,0xa2,0x20,0x10 = vpcmov %xmm1, (%rax), %xmm3, %xmm4
+0x8f,0xe8,0x64,0xa2,0xe2,0x10 = vpcmov %ymm1, %ymm2, %ymm3, %ymm4
+0x8f,0xe8,0xe4,0xa2,0x20,0x20 = vpcmov (%rax), %ymm2, %ymm3, %ymm4
+0x8f,0xe8,0x64,0xa2,0x20,0x10 = vpcmov %ymm1, (%rax), %ymm3, %ymm4
+0xc4,0xe3,0x71,0x49,0xfa,0x51 = vpermil2pd $1, %xmm5, %xmm2, %xmm1, %xmm7
+0xc4,0xe3,0xe1,0x49,0x20,0x32 = vpermil2pd $2, (%rax), %xmm3, %xmm3, %xmm4
+0xc4,0xe3,0xdd,0x49,0x70,0x08,0x03 = vpermil2pd $3, 8(%rax), %ymm0, %ymm4, %ymm6
+0xc4,0xe3,0x71,0x49,0x04,0x08,0x30 = vpermil2pd $0, %xmm3, (%rax, %rcx), %xmm1, %xmm0
+0xc4,0xe3,0x65,0x49,0xe2,0x11 = vpermil2pd $1, %ymm1, %ymm2, %ymm3, %ymm4
+0xc4,0xe3,0x65,0x49,0x20,0x12 = vpermil2pd $2, %ymm1, (%rax), %ymm3, %ymm4
+0xc4,0xe3,0x69,0x48,0xcb,0x40 = vpermil2ps $0, %xmm4, %xmm3, %xmm2, %xmm1
+0xc4,0xe3,0xe1,0x48,0x40,0x04,0x21 = vpermil2ps $1, 4(%rax), %xmm2, %xmm3, %xmm0
+0xc4,0xe3,0xd5,0x48,0x30,0x12 = vpermil2ps $2, (%rax), %ymm1, %ymm5, %ymm6
+0xc4,0xe3,0x61,0x48,0x20,0x13 = vpermil2ps $3, %xmm1, (%rax), %xmm3, %xmm4
+0xc4,0xe3,0x6d,0x48,0xd4,0x40 = vpermil2ps $0, %ymm4, %ymm4, %ymm2, %ymm2
+0xc4,0xe3,0x75,0x49,0x40,0x04,0x11 = vpermil2pd $1, %ymm1, 4(%rax), %ymm1, %ymm0
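All of these files share the same shape: a header comment naming the Capstone arch, mode, and syntax option (with None where the default syntax is meant, as in the intel-syntax file), followed by one vector per line. A rough sketch of a driver that replays a whole file, where run_file() and its parsing are hypothetical illustrations rather than the suite's actual test runner:

# Sketch of a ".s.cs" replay loop; run_file() is hypothetical.
import capstone
from capstone import Cs

def run_file(path):
    with open(path) as f:
        # e.g. "# CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT"
        header = f.readline().lstrip("# ").strip()
        arch, mode, syntax = (t.strip() for t in header.split(","))
        md = Cs(getattr(capstone, arch), getattr(capstone, mode))
        if syntax != "None":
            md.syntax = getattr(capstone, syntax)
        for line in f:
            if " = " not in line:
                continue  # skip blanks or stray lines
            encoding, expected = (p.strip() for p in line.split(" = ", 1))
            code = bytes(int(b, 16) for b in encoding.split(","))
            insn = next(md.disasm(code, 0))
            got = f"{insn.mnemonic} {insn.op_str}".strip()
            print("OK " if got == expected else "BAD", encoding, "=>", got)

Using getattr on the capstone module keeps the driver generic across the arch/mode/syntax constants that the headers in this suite name.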