diff --git a/api/docs/release.dox b/api/docs/release.dox
index 5b8faf839ed..5daf8576d9a 100644
--- a/api/docs/release.dox
+++ b/api/docs/release.dox
@@ -149,6 +149,8 @@ Further non-compatibility-affecting changes include:
on the 32/64 bit mode and the presence or absence of the address size prefix.
- Fixed the order of operands for the vpexpandd/vpexpandq opcodes (previously the
source and destination operands were reversed).
+ - Fixed the VEX-encoded forms of vaesdec/vaesdeclast/vaesenc/vaesenclast/vpclmulqdq
+   to obey VEX.L, which was previously ignored (256-bit forms were treated as 128-bit).
**************************************************
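
As a quick way to see the effect of this change, a minimal standalone sketch follows
(assuming DynamoRIO's dr_api.h standalone interface: dr_standalone_init(), instr_init(),
decode()); it feeds in the VEX.L=1 vaesenc byte sequence that appears in the updated
expect files below and checks that the operands now resolve as 256-bit:

#include <assert.h>
#include <stdio.h>
#include "dr_api.h"

int
main(void)
{
    /* c4 e2 4d dc d4: VEX.L=1 (256-bit) vaesenc, taken from the updated
     * drdecode_decenc expect files in this patch.
     */
    byte vex_l1_vaesenc[] = { 0xc4, 0xe2, 0x4d, 0xdc, 0xd4 };
    void *dc = dr_standalone_init();
    instr_t instr;
    instr_init(dc, &instr);
    byte *next = decode(dc, vex_l1_vaesenc, &instr);
    assert(next != NULL);
    assert(instr_get_opcode(&instr) == OP_vaesenc);
    /* Before this fix the destination decoded as a 16-byte xmm register;
     * with VEX.L honored it is the 32-byte register ymm2.
     */
    assert(opnd_get_reg(instr_get_dst(&instr, 0)) == DR_REG_YMM2);
    assert(opnd_get_size(instr_get_dst(&instr, 0)) == OPSZ_32);
    instr_free(dc, &instr);
    printf("VEX.L=1 vaesenc now decodes with ymm operands\n");
    return 0;
}
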
diff --git a/core/ir/x86/decode_table.c b/core/ir/x86/decode_table.c
index 9d6ec9205b8..22d4e7e87a9 100644
--- a/core/ir/x86/decode_table.c
+++ b/core/ir/x86/decode_table.c
@@ -6159,19 +6159,19 @@ const instr_info_t e_vex_extensions[][3] = {
{INVALID, 0x6638db18, catUncategorized, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* e_vex ext 32 */
{OP_aesenc, 0x6638dc18, catSIMD, "aesenc", Vdq, xx, Wdq,Vdq, xx, mrm|reqp, x, END_LIST},
- {OP_vaesenc, 0x6638dc18, catSIMD, "vaesenc", Vdq, xx, Hdq,Wdq, xx, mrm|vex|reqp, x, END_LIST},
+ {OP_vaesenc, 0x6638dc18, catSIMD, "vaesenc", Vx, xx, Hx,Wx, xx, mrm|vex|reqp, x, END_LIST},
{INVALID, 0x6638dc18, catUncategorized, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* e_vex ext 33 */
{OP_aesenclast, 0x6638dd18, catSIMD, "aesenclast",Vdq,xx,Wdq,Vdq,xx, mrm|reqp, x, END_LIST},
- {OP_vaesenclast, 0x6638dd18, catSIMD, "vaesenclast",Vdq,xx,Hdq,Wdq,xx, mrm|vex|reqp, x, END_LIST},
+ {OP_vaesenclast, 0x6638dd18, catSIMD, "vaesenclast",Vx,xx,Hx,Wx,xx, mrm|vex|reqp, x, END_LIST},
{INVALID, 0x6638dd18, catUncategorized, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* e_vex ext 34 */
{OP_aesdec, 0x6638de18, catSIMD, "aesdec", Vdq, xx, Wdq,Vdq, xx, mrm|reqp, x, END_LIST},
- {OP_vaesdec, 0x6638de18, catSIMD, "vaesdec", Vdq, xx, Hdq,Wdq, xx, mrm|vex|reqp, x, END_LIST},
+ {OP_vaesdec, 0x6638de18, catSIMD, "vaesdec", Vx, xx, Hx,Wx, xx, mrm|vex|reqp, x, END_LIST},
{INVALID, 0x6638de18, catUncategorized, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* e_vex ext 35 */
{OP_aesdeclast, 0x6638df18, catSIMD, "aesdeclast",Vdq,xx,Wdq,Vdq,xx, mrm|reqp, x, END_LIST},
- {OP_vaesdeclast, 0x6638df18, catSIMD, "vaesdeclast",Vdq,xx,Hdq,Wdq,xx, mrm|vex|reqp, x, END_LIST},
+ {OP_vaesdeclast, 0x6638df18, catSIMD, "vaesdeclast",Vx,xx,Hx,Wx,xx, mrm|vex|reqp, x, END_LIST},
{INVALID, 0x6638df18, catUncategorized, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* e_vex ext 36 */
{OP_pextrb, 0x663a1418, catSIMD, "pextrb", Rd_Mb, xx, Vb_dq, Ib, xx, mrm|reqp, x, END_LIST},
@@ -6260,7 +6260,7 @@ const instr_info_t e_vex_extensions[][3] = {
{INVALID, 0x663a6318, catUncategorized, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* e_vex ext 57 */
{OP_pclmulqdq, 0x663a4418, catSIMD, "pclmulqdq", Vdq, xx, Wdq, Ib, Vdq, mrm|reqp, x, END_LIST},
- {OP_vpclmulqdq,0x663a4418, catSIMD, "vpclmulqdq", Vdq, xx, Hdq, Wdq, Ib, mrm|vex|reqp, x, END_LIST},
+ {OP_vpclmulqdq,0x663a4418, catSIMD, "vpclmulqdq", Vx, xx, Hx, Wx, Ib, mrm|vex|reqp, x, END_LIST},
{INVALID, 0x663a4418, catUncategorized, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* e_vex ext 58 */
{OP_aeskeygenassist, 0x663adf18, catSIMD, "aeskeygenassist",Vdq,xx,Wdq,Ib,xx,mrm|reqp,x,END_LIST},
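
The Vx/Hx/Wx operand codes substituted above are the table's size-variable SIMD types
(following the SDM "x" convention: 128-bit when VEX.L=0, 256-bit when VEX.L=1), so the
same entry now covers both the xmm and ymm forms.  A hedged sketch of what this enables
on the IR side, assuming the standalone API and the INSTR_CREATE_vaesenc macro that the
new ir_x86_3args.h cases below exercise:

#include <assert.h>
#include "dr_api.h"

int
main(void)
{
    void *dc = dr_standalone_init();
    /* Build a 256-bit vaesenc; with the Vx/Hx/Wx entries the encoder can emit
     * the VEX.L=1 form and the decoder recovers the ymm operands.
     */
    instr_t *in = INSTR_CREATE_vaesenc(dc, opnd_create_reg(DR_REG_YMM0),
                                       opnd_create_reg(DR_REG_YMM1),
                                       opnd_create_reg(DR_REG_YMM2));
    byte buf[16];
    byte *end = instr_encode(dc, in, buf);
    assert(end != NULL);

    instr_t decoded;
    instr_init(dc, &decoded);
    decode(dc, buf, &decoded);
    assert(instr_get_opcode(&decoded) == OP_vaesenc);
    assert(opnd_get_size(instr_get_dst(&decoded, 0)) == OPSZ_32);

    instr_free(dc, &decoded);
    instr_destroy(dc, in);
    return 0;
}
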
diff --git a/suite/tests/api/ir_x86_3args.h b/suite/tests/api/ir_x86_3args.h
index a91a8794127..340b1b0bf44 100644
--- a/suite/tests/api/ir_x86_3args.h
+++ b/suite/tests/api/ir_x86_3args.h
@@ -275,12 +275,62 @@ OPCODE(vpmaxsd, vpmaxsd, vpmaxsd, 0, REGARG(XMM0), REGARG(XMM1), MEMARG(OPSZ_16)
OPCODE(vpmaxuw, vpmaxuw, vpmaxuw, 0, REGARG(XMM0), REGARG(XMM1), MEMARG(OPSZ_16))
OPCODE(vpmaxud, vpmaxud, vpmaxud, 0, REGARG(XMM0), REGARG(XMM1), MEMARG(OPSZ_16))
OPCODE(vpmulld, vpmulld, vpmulld, 0, REGARG(XMM0), REGARG(XMM1), MEMARG(OPSZ_16))
-OPCODE(vaesenc, vaesenc, vaesenc, 0, REGARG(XMM0), REGARG(XMM1), MEMARG(OPSZ_16))
-OPCODE(vaesenclast, vaesenclast, vaesenclast, 0, REGARG(XMM0), REGARG(XMM1),
+OPCODE(vaesenc_xloxloxlo, vaesenc, vaesenc, 0, REGARG(XMM0), REGARG(XMM1), REGARG(XMM2))
+OPCODE(vaesenc_xloxlold, vaesenc, vaesenc, 0, REGARG(XMM0), REGARG(XMM1), MEMARG(OPSZ_16))
+OPCODE(vaesenc_xhixhixhi, vaesenc, vaesenc, X64_ONLY, REGARG(XMM8), REGARG(XMM9),
+ REGARG(XMM15))
+OPCODE(vaesenc_xhixhild, vaesenc, vaesenc, X64_ONLY, REGARG(XMM8), REGARG(XMM9),
MEMARG(OPSZ_16))
-OPCODE(vaesdec, vaesdec, vaesdec, 0, REGARG(XMM0), REGARG(XMM1), MEMARG(OPSZ_16))
-OPCODE(vaesdeclast, vaesdeclast, vaesdeclast, 0, REGARG(XMM0), REGARG(XMM1),
+OPCODE(vaesenc_yloyloylo, vaesenc, vaesenc, 0, REGARG(YMM0), REGARG(YMM1), REGARG(YMM2))
+OPCODE(vaesenc_yloylold, vaesenc, vaesenc, 0, REGARG(YMM0), REGARG(YMM1), MEMARG(OPSZ_32))
+OPCODE(vaesenc_yhiyhiyhi, vaesenc, vaesenc, X64_ONLY, REGARG(YMM8), REGARG(YMM9),
+ REGARG(YMM15))
+OPCODE(vaesenc_yhiyhild, vaesenc, vaesenc, X64_ONLY, REGARG(YMM8), REGARG(YMM9),
+ MEMARG(OPSZ_32))
+OPCODE(vaesenclast_xloxloxlo, vaesenclast, vaesenclast, 0, REGARG(XMM0), REGARG(XMM1),
+ REGARG(XMM2))
+OPCODE(vaesenclast_xloxlold, vaesenclast, vaesenclast, 0, REGARG(XMM0), REGARG(XMM1),
+ MEMARG(OPSZ_16))
+OPCODE(vaesenclast_xhixhixhi, vaesenclast, vaesenclast, X64_ONLY, REGARG(XMM8),
+ REGARG(XMM9), REGARG(XMM15))
+OPCODE(vaesenclast_xhixhild, vaesenclast, vaesenclast, X64_ONLY, REGARG(XMM8),
+ REGARG(XMM9), MEMARG(OPSZ_16))
+OPCODE(vaesenclast_yloyloylo, vaesenclast, vaesenclast, 0, REGARG(YMM0), REGARG(YMM1),
+ REGARG(YMM2))
+OPCODE(vaesenclast_yloylold, vaesenclast, vaesenclast, 0, REGARG(YMM0), REGARG(YMM1),
+ MEMARG(OPSZ_32))
+OPCODE(vaesenclast_yhiyhiyhi, vaesenclast, vaesenclast, X64_ONLY, REGARG(YMM8),
+ REGARG(YMM9), REGARG(YMM15))
+OPCODE(vaesenclast_yhiyhild, vaesenclast, vaesenclast, X64_ONLY, REGARG(YMM8),
+ REGARG(YMM9), MEMARG(OPSZ_32))
+OPCODE(vaesdec_xloxloxlo, vaesdec, vaesdec, 0, REGARG(XMM0), REGARG(XMM1), REGARG(XMM2))
+OPCODE(vaesdec_xloxlold, vaesdec, vaesdec, 0, REGARG(XMM0), REGARG(XMM1), MEMARG(OPSZ_16))
+OPCODE(vaesdec_xhixhixhi, vaesdec, vaesdec, X64_ONLY, REGARG(XMM8), REGARG(XMM9),
+ REGARG(XMM15))
+OPCODE(vaesdec_xhixhild, vaesdec, vaesdec, X64_ONLY, REGARG(XMM8), REGARG(XMM9),
MEMARG(OPSZ_16))
+OPCODE(vaesdec_yloyloylo, vaesdec, vaesdec, 0, REGARG(YMM0), REGARG(YMM1), REGARG(YMM2))
+OPCODE(vaesdec_yloylold, vaesdec, vaesdec, 0, REGARG(YMM0), REGARG(YMM1), MEMARG(OPSZ_32))
+OPCODE(vaesdec_yhiyhiyhi, vaesdec, vaesdec, X64_ONLY, REGARG(YMM8), REGARG(YMM9),
+ REGARG(YMM15))
+OPCODE(vaesdec_yhiyhild, vaesdec, vaesdec, X64_ONLY, REGARG(YMM8), REGARG(YMM9),
+ MEMARG(OPSZ_32))
+OPCODE(vaesdeclast_xloxloxlo, vaesdeclast, vaesdeclast, 0, REGARG(XMM0), REGARG(XMM1),
+ REGARG(XMM2))
+OPCODE(vaesdeclast_xloxlold, vaesdeclast, vaesdeclast, 0, REGARG(XMM0), REGARG(XMM1),
+ MEMARG(OPSZ_16))
+OPCODE(vaesdeclast_xhixhixhi, vaesdeclast, vaesdeclast, X64_ONLY, REGARG(XMM8),
+ REGARG(XMM9), REGARG(XMM15))
+OPCODE(vaesdeclast_xhixhild, vaesdeclast, vaesdeclast, X64_ONLY, REGARG(XMM8),
+ REGARG(XMM9), MEMARG(OPSZ_16))
+OPCODE(vaesdeclast_yloyloylo, vaesdeclast, vaesdeclast, 0, REGARG(YMM0), REGARG(YMM1),
+ REGARG(YMM2))
+OPCODE(vaesdeclast_yloylold, vaesdeclast, vaesdeclast, 0, REGARG(YMM0), REGARG(YMM1),
+ MEMARG(OPSZ_32))
+OPCODE(vaesdeclast_yhiyhiyhi, vaesdeclast, vaesdeclast, X64_ONLY, REGARG(YMM8),
+ REGARG(YMM9), REGARG(YMM15))
+OPCODE(vaesdeclast_yhiyhild, vaesdeclast, vaesdeclast, X64_ONLY, REGARG(YMM8),
+ REGARG(YMM9), MEMARG(OPSZ_32))
OPCODE(vpextrb, vpextrb, vpextrb, 0, REGARG(EAX), REGARG_PARTIAL(XMM0, OPSZ_1),
IMMARG(OPSZ_1))
OPCODE(vpextrb_mem, vpextrb, vpextrb, 0, MEMARG(OPSZ_1), REGARG_PARTIAL(XMM0, OPSZ_1),
diff --git a/suite/tests/api/ir_x86_4args.h b/suite/tests/api/ir_x86_4args.h
index 786bd774089..cf13ae1a99c 100644
--- a/suite/tests/api/ir_x86_4args.h
+++ b/suite/tests/api/ir_x86_4args.h
@@ -90,8 +90,22 @@ OPCODE(vdppd, vdppd, vdppd, 0, REGARG(XMM0), REGARG(XMM1), MEMARG(OPSZ_16),
IMMARG(OPSZ_1))
OPCODE(vmpsadbw, vmpsadbw, vmpsadbw, 0, REGARG(XMM0), REGARG(XMM1), MEMARG(OPSZ_16),
IMMARG(OPSZ_1))
-OPCODE(vpclmulqdq, vpclmulqdq, vpclmulqdq, 0, REGARG(XMM0), REGARG(XMM1), MEMARG(OPSZ_16),
- IMMARG(OPSZ_1))
+OPCODE(vpclmulqdq_xloxloxlo, vpclmulqdq, vpclmulqdq, 0, REGARG(XMM0), REGARG(XMM1),
+ REGARG(XMM2), IMMARG(OPSZ_1))
+OPCODE(vpclmulqdq_xloxlold, vpclmulqdq, vpclmulqdq, 0, REGARG(XMM0), REGARG(XMM1),
+ MEMARG(OPSZ_16), IMMARG(OPSZ_1))
+OPCODE(vpclmulqdq_xhixhixhi, vpclmulqdq, vpclmulqdq, X64_ONLY, REGARG(XMM8), REGARG(XMM9),
+ REGARG(XMM15), IMMARG(OPSZ_1))
+OPCODE(vpclmulqdq_xhixhild, vpclmulqdq, vpclmulqdq, X64_ONLY, REGARG(XMM8), REGARG(XMM9),
+ MEMARG(OPSZ_16), IMMARG(OPSZ_1))
+OPCODE(vpclmulqdq_yloyloylo, vpclmulqdq, vpclmulqdq, 0, REGARG(YMM0), REGARG(YMM1),
+ REGARG(YMM2), IMMARG(OPSZ_1))
+OPCODE(vpclmulqdq_yloylold, vpclmulqdq, vpclmulqdq, 0, REGARG(YMM0), REGARG(YMM1),
+ MEMARG(OPSZ_32), IMMARG(OPSZ_1))
+OPCODE(vpclmulqdq_yhiyhiyhi, vpclmulqdq, vpclmulqdq, X64_ONLY, REGARG(YMM8), REGARG(YMM9),
+ REGARG(YMM15), IMMARG(OPSZ_1))
+OPCODE(vpclmulqdq_yhiyhild, vpclmulqdq, vpclmulqdq, X64_ONLY, REGARG(YMM8), REGARG(YMM9),
+ MEMARG(OPSZ_32), IMMARG(OPSZ_1))
OPCODE(vroundss, vroundss, vroundss, 0, REGARG(XMM0), REGARG_PARTIAL(XMM1, OPSZ_12),
MEMARG(OPSZ_4), IMMARG(OPSZ_1))
OPCODE(vroundsd, vroundsd, vroundsd, 0, REGARG(XMM0), REGARG_PARTIAL(XMM1, OPSZ_8),
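
These vpclmulqdq cases exercise both the 128-bit forms and the newly supported 256-bit
forms.  A minimal sketch showing that only VEX.L now distinguishes the two decodings
(assumptions: a standalone dr_api.h client; the L=1 byte sequence is the one in the
updated drdecode_decenc_x86.expect below, and the L=0 variant is constructed here by
clearing VEX.L, bit 2 of the third VEX byte):

#include <assert.h>
#include "dr_api.h"

/* c4 e3 55 44 f4 ab: VEX.L=1 -> vpclmulqdq with ymm operands.
 * c4 e3 51 44 f4 ab: same encoding with VEX.L=0 -> xmm operands.
 */
static byte vex_l1[] = { 0xc4, 0xe3, 0x55, 0x44, 0xf4, 0xab };
static byte vex_l0[] = { 0xc4, 0xe3, 0x51, 0x44, 0xf4, 0xab };

static opnd_size_t
dst_size(void *dc, byte *pc)
{
    instr_t instr;
    opnd_size_t sz;
    instr_init(dc, &instr);
    decode(dc, pc, &instr);
    assert(instr_get_opcode(&instr) == OP_vpclmulqdq);
    sz = opnd_get_size(instr_get_dst(&instr, 0));
    instr_free(dc, &instr);
    return sz;
}

int
main(void)
{
    void *dc = dr_standalone_init();
    assert(dst_size(dc, vex_l1) == OPSZ_32); /* VEX.L honored: 256-bit */
    assert(dst_size(dc, vex_l0) == OPSZ_16); /* 128-bit form unchanged */
    return 0;
}
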
diff --git a/third_party/binutils/test_decenc/drdecode_decenc_x86.expect b/third_party/binutils/test_decenc/drdecode_decenc_x86.expect
index 75e5e47a790..7e417953aa6 100644
--- a/third_party/binutils/test_decenc/drdecode_decenc_x86.expect
+++ b/third_party/binutils/test_decenc/drdecode_decenc_x86.expect
@@ -79758,26 +79758,26 @@ test_s:
90 nop
90 nop
90 nop
- c4 e2 4d dc d4 vaesenc %xmm6, %xmm4, %xmm2
- c4 e2 4d dc 39 vaesenc %xmm6, (%ecx), %xmm7
- c4 e2 4d dd d4 vaesenclast %xmm6, %xmm4, %xmm2
- c4 e2 4d dd 39 vaesenclast %xmm6, (%ecx), %xmm7
- c4 e2 4d de d4 vaesdec %xmm6, %xmm4, %xmm2
- c4 e2 4d de 39 vaesdec %xmm6, (%ecx), %xmm7
- c4 e2 4d df d4 vaesdeclast %xmm6, %xmm4, %xmm2
- c4 e2 4d df 39 vaesdeclast %xmm6, (%ecx), %xmm7
- c4 e2 4d dc d4 vaesenc %xmm6, %xmm4, %xmm2
- c4 e2 4d dc 39 vaesenc %xmm6, (%ecx), %xmm7
- c4 e2 4d dc 39 vaesenc %xmm6, (%ecx), %xmm7
- c4 e2 4d dd d4 vaesenclast %xmm6, %xmm4, %xmm2
- c4 e2 4d dd 39 vaesenclast %xmm6, (%ecx), %xmm7
- c4 e2 4d dd 39 vaesenclast %xmm6, (%ecx), %xmm7
- c4 e2 4d de d4 vaesdec %xmm6, %xmm4, %xmm2
- c4 e2 4d de 39 vaesdec %xmm6, (%ecx), %xmm7
- c4 e2 4d de 39 vaesdec %xmm6, (%ecx), %xmm7
- c4 e2 4d df d4 vaesdeclast %xmm6, %xmm4, %xmm2
- c4 e2 4d df 39 vaesdeclast %xmm6, (%ecx), %xmm7
- c4 e2 4d df 39 vaesdeclast %xmm6, (%ecx), %xmm7
+ c4 e2 4d dc d4 vaesenc %ymm6, %ymm4, %ymm2
+ c4 e2 4d dc 39 vaesenc %ymm6, (%ecx), %ymm7
+ c4 e2 4d dd d4 vaesenclast %ymm6, %ymm4, %ymm2
+ c4 e2 4d dd 39 vaesenclast %ymm6, (%ecx), %ymm7
+ c4 e2 4d de d4 vaesdec %ymm6, %ymm4, %ymm2
+ c4 e2 4d de 39 vaesdec %ymm6, (%ecx), %ymm7
+ c4 e2 4d df d4 vaesdeclast %ymm6, %ymm4, %ymm2
+ c4 e2 4d df 39 vaesdeclast %ymm6, (%ecx), %ymm7
+ c4 e2 4d dc d4 vaesenc %ymm6, %ymm4, %ymm2
+ c4 e2 4d dc 39 vaesenc %ymm6, (%ecx), %ymm7
+ c4 e2 4d dc 39 vaesenc %ymm6, (%ecx), %ymm7
+ c4 e2 4d dd d4 vaesenclast %ymm6, %ymm4, %ymm2
+ c4 e2 4d dd 39 vaesenclast %ymm6, (%ecx), %ymm7
+ c4 e2 4d dd 39 vaesenclast %ymm6, (%ecx), %ymm7
+ c4 e2 4d de d4 vaesdec %ymm6, %ymm4, %ymm2
+ c4 e2 4d de 39 vaesdec %ymm6, (%ecx), %ymm7
+ c4 e2 4d de 39 vaesdec %ymm6, (%ecx), %ymm7
+ c4 e2 4d df d4 vaesdeclast %ymm6, %ymm4, %ymm2
+ c4 e2 4d df 39 vaesdeclast %ymm6, (%ecx), %ymm7
+ c4 e2 4d df 39 vaesdeclast %ymm6, (%ecx), %ymm7
90 nop
90 nop
90 nop
@@ -79873,15 +79873,15 @@ test_s:
90 nop
90 nop
90 nop
- c4 e3 55 44 f4 ab vpclmulqdq %xmm5, %xmm4, $0xab, %xmm6
- c4 e3 55 44 b4 f4 c0 vpclmulqdq %xmm5, -0x0001e240(%esp,%esi,8), $0x7b, %xmm6
+ c4 e3 55 44 f4 ab vpclmulqdq %ymm5, %ymm4, $0xab, %ymm6
+ c4 e3 55 44 b4 f4 c0 vpclmulqdq %ymm5, -0x0001e240(%esp,%esi,8), $0x7b, %ymm6
1d fe ff 7b
- c4 e3 55 44 b2 e0 0f vpclmulqdq %xmm5, 0x00000fe0(%edx), $0x7b, %xmm6
+ c4 e3 55 44 b2 e0 0f vpclmulqdq %ymm5, 0x00000fe0(%edx), $0x7b, %ymm6
00 00 7b
- c4 e3 55 44 f4 ab vpclmulqdq %xmm5, %xmm4, $0xab, %xmm6
- c4 e3 55 44 b4 f4 c0 vpclmulqdq %xmm5, -0x0001e240(%esp,%esi,8), $0x7b, %xmm6
+ c4 e3 55 44 f4 ab vpclmulqdq %ymm5, %ymm4, $0xab, %ymm6
+ c4 e3 55 44 b4 f4 c0 vpclmulqdq %ymm5, -0x0001e240(%esp,%esi,8), $0x7b, %ymm6
1d fe ff 7b
- c4 e3 55 44 b2 e0 0f vpclmulqdq %xmm5, 0x00000fe0(%edx), $0x7b, %xmm6
+ c4 e3 55 44 b2 e0 0f vpclmulqdq %ymm5, 0x00000fe0(%edx), $0x7b, %ymm6
00 00 7b
90 nop
90 nop
diff --git a/third_party/binutils/test_decenc/drdecode_decenc_x86_64.expect b/third_party/binutils/test_decenc/drdecode_decenc_x86_64.expect
index 8edf7639616..825b147e33c 100644
--- a/third_party/binutils/test_decenc/drdecode_decenc_x86_64.expect
+++ b/third_party/binutils/test_decenc/drdecode_decenc_x86_64.expect
@@ -39072,26 +39072,26 @@ test_x86_64_s:
8f e9 c8 01 e6 tzmsk %rsi, %rsi
8f a9 b0 01 24 05 53 tzmsk -0x0000dead(,%r8), %r9
21 ff ff
- c4 e2 4d dc d4 vaesenc %xmm6, %xmm4, %xmm2
- c4 e2 4d dc 39 vaesenc %xmm6, (%rcx), %xmm7
- c4 e2 4d dd d4 vaesenclast %xmm6, %xmm4, %xmm2
- c4 e2 4d dd 39 vaesenclast %xmm6, (%rcx), %xmm7
- c4 e2 4d de d4 vaesdec %xmm6, %xmm4, %xmm2
- c4 e2 4d de 39 vaesdec %xmm6, (%rcx), %xmm7
- c4 e2 4d df d4 vaesdeclast %xmm6, %xmm4, %xmm2
- c4 e2 4d df 39 vaesdeclast %xmm6, (%rcx), %xmm7
- c4 e2 4d dc d4 vaesenc %xmm6, %xmm4, %xmm2
- c4 e2 4d dc 39 vaesenc %xmm6, (%rcx), %xmm7
- c4 e2 4d dc 39 vaesenc %xmm6, (%rcx), %xmm7
- c4 e2 4d dd d4 vaesenclast %xmm6, %xmm4, %xmm2
- c4 e2 4d dd 39 vaesenclast %xmm6, (%rcx), %xmm7
- c4 e2 4d dd 39 vaesenclast %xmm6, (%rcx), %xmm7
- c4 e2 4d de d4 vaesdec %xmm6, %xmm4, %xmm2
- c4 e2 4d de 39 vaesdec %xmm6, (%rcx), %xmm7
- c4 e2 4d de 39 vaesdec %xmm6, (%rcx), %xmm7
- c4 e2 4d df d4 vaesdeclast %xmm6, %xmm4, %xmm2
- c4 e2 4d df 39 vaesdeclast %xmm6, (%rcx), %xmm7
- c4 e2 4d df 39 vaesdeclast %xmm6, (%rcx), %xmm7
+ c4 e2 4d dc d4 vaesenc %ymm6, %ymm4, %ymm2
+ c4 e2 4d dc 39 vaesenc %ymm6, (%rcx), %ymm7
+ c4 e2 4d dd d4 vaesenclast %ymm6, %ymm4, %ymm2
+ c4 e2 4d dd 39 vaesenclast %ymm6, (%rcx), %ymm7
+ c4 e2 4d de d4 vaesdec %ymm6, %ymm4, %ymm2
+ c4 e2 4d de 39 vaesdec %ymm6, (%rcx), %ymm7
+ c4 e2 4d df d4 vaesdeclast %ymm6, %ymm4, %ymm2
+ c4 e2 4d df 39 vaesdeclast %ymm6, (%rcx), %ymm7
+ c4 e2 4d dc d4 vaesenc %ymm6, %ymm4, %ymm2
+ c4 e2 4d dc 39 vaesenc %ymm6, (%rcx), %ymm7
+ c4 e2 4d dc 39 vaesenc %ymm6, (%rcx), %ymm7
+ c4 e2 4d dd d4 vaesenclast %ymm6, %ymm4, %ymm2
+ c4 e2 4d dd 39 vaesenclast %ymm6, (%rcx), %ymm7
+ c4 e2 4d dd 39 vaesenclast %ymm6, (%rcx), %ymm7
+ c4 e2 4d de d4 vaesdec %ymm6, %ymm4, %ymm2
+ c4 e2 4d de 39 vaesdec %ymm6, (%rcx), %ymm7
+ c4 e2 4d de 39 vaesdec %ymm6, (%rcx), %ymm7
+ c4 e2 4d df d4 vaesdeclast %ymm6, %ymm4, %ymm2
+ c4 e2 4d df 39 vaesdeclast %ymm6, (%rcx), %ymm7
+ c4 e2 4d df 39 vaesdeclast %ymm6, (%rcx), %ymm7
c5 f9 6e c0 vmovd %eax, %xmm0
c5 f9 6e 00 vmovd (%rax), %xmm0
c4 e1 79 6e c0 vmovd %eax, %xmm0
@@ -39159,14 +39159,21 @@ test_x86_64_s:
8f e9 78 01 30 blsic (%rax), %eax
8f e9 78 01 38 t1mskc (%rax), %eax
8f e9 78 01 20 tzmsk (%rax), %eax
- 24 01 and $0x01, %al
- 00 00 add %al, (%rax)
- 7b 62 jnp $0x0000000010018f7b
- 63 15 20 44 72 7f movsxd 0x000000008f73d33f, %edx
- 7b 34 jnp $0x0000000010018f55
- 12 00 adc (%rax), %al
- 00 7b f3 add %bh, -0x0d(%rbx)
- 0f ae f0 mfence
+ c4 43 35 44 d0 ab vpclmulqdq %ymm9, %ymm8, $0xab, %ymm10
+ c4 23 35 44 94 f0 24 vpclmulqdq %ymm9, 0x00000124(%rax,%r14,8), $0x7b, %ymm10
+ 01 00 00 7b
+ c4 63 35 44 92 e0 0f vpclmulqdq %ymm9, 0x00000fe0(%rdx), $0x7b, %ymm10
+ 00 00 7b
+ c4 43 25 44 e2 11 vpclmulqdq %ymm11, %ymm10, $0x11, %ymm12
+ c4 43 1d 44 eb 01 vpclmulqdq %ymm12, %ymm11, $0x01, %ymm13
+ c4 43 15 44 f4 10 vpclmulqdq %ymm13, %ymm12, $0x10, %ymm14
+ c4 43 0d 44 fd 00 vpclmulqdq %ymm14, %ymm13, $0x00, %ymm15
+ c4 43 35 44 d0 ab vpclmulqdq %ymm9, %ymm8, $0xab, %ymm10
+ c4 23 35 44 94 f0 34 vpclmulqdq %ymm9, 0x00001234(%rax,%r14,8), $0x7b, %ymm10
+ 12 00 00 7b
+ c4 63 35 44 92 e0 0f vpclmulqdq %ymm9, 0x00000fe0(%rdx), $0x7b, %ymm10
+ 00 00 7b
+ f3 0f ae f0 mfence
f3 41 0f ae f2 mfence
67 f3 41 0f ae f2 addr32 mfence
f2 0f ae f1 mfence
@@ -41150,7 +41157,7 @@ test_x86_64_s:
41 03 00 add (%r8), %eax
45 03 00 add (%r8), %r8d
49 03 00 add (%r8), %rax
- 03 05 22 22 22 22 add 0x000000003223dc8b, %eax
+ 03 05 22 22 22 22 add 0x000000003223dcc7, %eax
03 45 00 add 0x00(%rbp), %eax
03 04 25 22 22 22 22 add 0x22222222, %eax
41 03 45 00 add 0x00(%r13), %eax
@@ -41182,12 +41189,12 @@ test_x86_64_s:
83 04 81 11 addl $0x11, (%rcx,%rax,4)
41 83 04 81 11 addl $0x11, (%r9,%rax,4)
42 83 04 81 11 addl $0x11, (%rcx,%r8,4)
- 83 05 22 22 22 22 33 addl $0x33, 0x000000003223dd06
- 48 83 05 22 22 22 22 addq $0x33, 0x000000003223dd0e
+ 83 05 22 22 22 22 33 addl $0x33, 0x000000003223dd42
+ 48 83 05 22 22 22 22 addq $0x33, 0x000000003223dd4a
33
- 81 05 22 22 22 22 33 addl $0x33333333, 0x000000003223dd18
+ 81 05 22 22 22 22 33 addl $0x33333333, 0x000000003223dd54
33 33 33
- 48 81 05 22 22 22 22 addq $0x33333333, 0x000000003223dd23
+ 48 81 05 22 22 22 22 addq $0x33333333, 0x000000003223dd5f
33 33 33 33
83 04 c5 22 22 22 22 addl $0x33, 0x22222222(,%rax,8)
33
@@ -41221,7 +41228,7 @@ test_x86_64_s:
00 00
8b 04 25 00 00 00 00 mov 0x00, %eax
8b 80 00 00 00 00 mov 0x00000000(%rax), %eax
- 8b 05 00 00 00 00 mov 0x000000001001bb99, %eax
+ 8b 05 00 00 00 00 mov 0x000000001001bbd5, %eax
b0 00 mov $0x00, %al
66 b8 00 00 data16 mov $0x0000, %ax
b8 00 00 00 00 mov $0x00000000, %eax
@@ -41230,7 +41237,7 @@ test_x86_64_s:
00 00
8b 04 25 00 00 00 00 mov 0x00, %eax
8b 80 00 00 00 00 mov 0x00000000(%rax), %eax
- 8b 05 00 00 00 00 mov 0x000000001001bbc7, %eax
+ 8b 05 00 00 00 00 mov 0x000000001001bc03, %eax
a0 11 22 33 44 55 66 mov 0x8877665544332211, %al
77 88
66 a1 11 22 33 44 55 data16 mov 0x8877665544332211, %ax
@@ -41403,10 +41410,10 @@ test_x86_64_s:
48 05 00 00 00 00 add $0x00000000, %rax
66 b8 00 00 data16 mov $0x0000, %ax
b0 00 mov $0x00, %al
- 8b 05 00 00 00 00 mov 0x000000001001be79, %eax
- 67 8b 05 00 00 00 00 addr32 mov 0x000000001001be80, %eax
- e8 00 00 00 00 call $0x000000001001be85
- e3 00 jrcxz $0x000000001001be87
+ 8b 05 00 00 00 00 mov 0x000000001001beb5, %eax
+ 67 8b 05 00 00 00 00 addr32 mov 0x000000001001bebc, %eax
+ e8 00 00 00 00 call $0x000000001001bec1
+ e3 00 jrcxz $0x000000001001bec3
48 b8 00 00 00 00 00 mov $0x0000000000000000, %rax
00 00 00
48 05 00 00 00 00 add $0x00000000, %rax
@@ -41415,22 +41422,22 @@ test_x86_64_s:
00 00 00
48 05 00 00 00 00 add $0x00000000, %rax
8b 83 00 00 00 00 mov 0x00000000(%rbx), %eax
- e8 00 00 00 00 call $0x000000001001beb8
+ e8 00 00 00 00 call $0x000000001001bef4
48 05 00 00 00 00 add $0x00000000, %rax
- 48 8d 05 00 00 00 00 lea 0x000000001001bec5, %rax
+ 48 8d 05 00 00 00 00 lea 0x000000001001bf01, %rax
48 05 00 00 00 00 add $0x00000000, %rax
48 05 00 00 00 00 add $0x00000000, %rax
8b 83 00 00 00 00 mov 0x00000000(%rbx), %eax
- e8 00 00 00 00 call $0x000000001001bedc
+ e8 00 00 00 00 call $0x000000001001bf18
48 05 00 00 00 00 add $0x00000000, %rax
8b 83 00 00 00 00 mov 0x00000000(%rbx), %eax
- e8 00 00 00 00 call $0x000000001001beed
+ e8 00 00 00 00 call $0x000000001001bf29
48 05 00 00 00 00 add $0x00000000, %rax
8b 83 00 00 00 00 mov 0x00000000(%rbx), %eax
- e8 00 00 00 00 call $0x000000001001befe
+ e8 00 00 00 00 call $0x000000001001bf3a
48 05 00 00 00 00 add $0x00000000, %rax
8b 83 00 00 00 00 mov 0x00000000(%rbx), %eax
- e8 00 00 00 00 call $0x000000001001bf0f
+ e8 00 00 00 00 call $0x000000001001bf4b
48 b8 00 00 00 00 00 mov $0x0000000000000000, %rax
00 00 00
48 05 00 00 00 00 add $0x00000000, %rax
@@ -104682,7 +104689,7 @@ test_x86_64_s:
7b 00 00 00
62 e2 7d 21 91 84 c5 vpgatherqd 0x0000007b(%rbp,%ymm16,8), %ymm16 {%k1} {%k1}
7b 00 00 00
- 62 f1 7c 48 28 05 00 vmovaps 0x0000000010073012, %zmm0 {%k0}
+ 62 f1 7c 48 28 05 00 vmovaps 0x000000001007304e, %zmm0 {%k0}
fc ff ff
62 f1 7c 48 28 04 05 vmovaps 0x40(,%rax), %zmm0 {%k0}
40 00 00 00
@@ -104931,9 +104938,9 @@ test_x86_64_s:
48 0f 38 f9 01 movdiri %rax, (%rcx)
66 0f 38 f8 01 movdir64b (%rcx), %rax
67 66 0f 38 f8 01 movdir64b (%ecx), %eax
- 66 0f 38 f8 0d 00 00 movdir64b 0x00000000100739ad, %rcx
+ 66 0f 38 f8 0d 00 00 movdir64b 0x00000000100739e9, %rcx
00 00
- 67 66 0f 38 f8 0d 00 movdir64b 0x00000000100739b7, %ecx
+ 67 66 0f 38 f8 0d 00 movdir64b 0x00000000100739f3, %ecx
00 00 00
67 66 0f 38 f8 0c 25 movdir64b 0x00, %ecx
00 00 00 00
@@ -104945,9 +104952,9 @@ test_x86_64_s:
48 0f 38 f9 01 movdiri %rax, (%rcx)
66 0f 38 f8 01 movdir64b (%rcx), %rax
67 66 0f 38 f8 01 movdir64b (%ecx), %eax
- 66 0f 38 f8 0d 00 00 movdir64b 0x00000000100739f3, %rcx
+ 66 0f 38 f8 0d 00 00 movdir64b 0x0000000010073a2f, %rcx
00 00
- 67 66 0f 38 f8 0d 00 movdir64b 0x00000000100739fd, %ecx
+ 67 66 0f 38 f8 0d 00 movdir64b 0x0000000010073a39, %ecx
00 00 00
67 66 0f 38 f8 0c 25 movdir64b 0x00, %ecx
00 00 00 00
@@ -104957,13 +104964,13 @@ test_x86_64_s:
67 f2 0f 38 f8 01 enqcmd (%ecx), %eax
f3 0f 38 f8 01 enqcmds (%rcx), %rax
67 f3 0f 38 f8 01 enqcmds (%ecx), %eax
- f2 0f 38 f8 0d 00 00 enqcmd 0x0000000010073a32, %rcx
+ f2 0f 38 f8 0d 00 00 enqcmd 0x0000000010073a6e, %rcx
00 00
- 67 f2 0f 38 f8 0d 00 enqcmd 0x0000000010073a3c, %ecx
+ 67 f2 0f 38 f8 0d 00 enqcmd 0x0000000010073a78, %ecx
00 00 00
- f3 0f 38 f8 0d 00 00 enqcmds 0x0000000010073a45, %rcx
+ f3 0f 38 f8 0d 00 00 enqcmds 0x0000000010073a81, %rcx
00 00
- 67 f3 0f 38 f8 0d 00 enqcmds 0x0000000010073a4f, %ecx
+ 67 f3 0f 38 f8 0d 00 enqcmds 0x0000000010073a8b, %ecx
00 00 00
f2 0f 38 f8 0c 25 00 enqcmd 0x00, %rcx
00 00 00
@@ -104977,13 +104984,13 @@ test_x86_64_s:
67 f2 0f 38 f8 01 enqcmd (%ecx), %eax
f3 0f 38 f8 01 enqcmds (%rcx), %rax
67 f3 0f 38 f8 01 enqcmds (%ecx), %eax
- f2 0f 38 f8 0d 00 00 enqcmd 0x0000000010073a98, %rcx
+ f2 0f 38 f8 0d 00 00 enqcmd 0x0000000010073ad4, %rcx
00 00
- 67 f2 0f 38 f8 0d 00 enqcmd 0x0000000010073aa2, %ecx
+ 67 f2 0f 38 f8 0d 00 enqcmd 0x0000000010073ade, %ecx
00 00 00
- f3 0f 38 f8 0d 00 00 enqcmds 0x0000000010073aab, %rcx
+ f3 0f 38 f8 0d 00 00 enqcmds 0x0000000010073ae7, %rcx
00 00
- 67 f3 0f 38 f8 0d 00 enqcmds 0x0000000010073ab5, %ecx
+ 67 f3 0f 38 f8 0d 00 enqcmds 0x0000000010073af1, %ecx
00 00 00
f2 0f 38 f8 0c 25 00 enqcmd 0x00, %rcx
00 00 00
diff --git a/third_party/binutils/test_decenc/test_decenc_x86_64.asm b/third_party/binutils/test_decenc/test_decenc_x86_64.asm
index 963381eceb2..683e82d5686 100644
--- a/third_party/binutils/test_decenc/test_decenc_x86_64.asm
+++ b/third_party/binutils/test_decenc/test_decenc_x86_64.asm
@@ -39618,17 +39618,20 @@ GLOBAL_LABEL(FUNCNAME:)
/* x86_64_vpclmulqdq.s */
- /* FIXME i#1312: Support AVX-512. */
- /* RAW(62) RAW(03) RAW(15) RAW(20) RAW(44) RAW(f4) RAW(ab) */
- /* RAW(62) RAW(23) RAW(15) RAW(20) RAW(44) RAW(b4) RAW(f0) */
- RAW(24) RAW(01) RAW(00) RAW(00) RAW(7b)
- RAW(62) RAW(63) RAW(15) RAW(20) RAW(44) RAW(72) RAW(7f)
- RAW(7b)
- /* RAW(62) RAW(03) RAW(15) RAW(20) RAW(44) RAW(f4) RAW(ab) */
- /* RAW(62) RAW(23) RAW(15) RAW(20) RAW(44) RAW(b4) RAW(f0) */
- RAW(34) RAW(12) RAW(00) RAW(00) RAW(7b)
- /* RAW(62) RAW(63) RAW(15) RAW(20) RAW(44) RAW(72) RAW(7f) */
- /* RAW(7b) */
+ RAW(c4) RAW(43) RAW(35) RAW(44) RAW(d0) RAW(ab)
+ RAW(c4) RAW(23) RAW(35) RAW(44) RAW(94) RAW(f0) RAW(24)
+ RAW(01) RAW(00) RAW(00) RAW(7b)
+ RAW(c4) RAW(63) RAW(35) RAW(44) RAW(92) RAW(e0) RAW(0f)
+ RAW(00) RAW(00) RAW(7b)
+ RAW(c4) RAW(43) RAW(25) RAW(44) RAW(e2) RAW(11)
+ RAW(c4) RAW(43) RAW(1d) RAW(44) RAW(eb) RAW(01)
+ RAW(c4) RAW(43) RAW(15) RAW(44) RAW(f4) RAW(10)
+ RAW(c4) RAW(43) RAW(0d) RAW(44) RAW(fd) RAW(00)
+ RAW(c4) RAW(43) RAW(35) RAW(44) RAW(d0) RAW(ab)
+ RAW(c4) RAW(23) RAW(35) RAW(44) RAW(94) RAW(f0) RAW(34)
+ RAW(12) RAW(00) RAW(00) RAW(7b)
+ RAW(c4) RAW(63) RAW(35) RAW(44) RAW(92) RAW(e0) RAW(0f)
+ RAW(00) RAW(00) RAW(7b)
/* x86_64_waitpkg.s */