summaryrefslogtreecommitdiff
path: root/linux-x86_64
diff options
context:
space:
mode:
author    Robert Sloan <varomodt@google.com>  2017-02-06 08:36:14 -0800
committer Rob Sloan <varomodt@google.com>    2017-02-06 16:52:08 +0000
commit   4d1ac508237e73d6596202e4fa36d93ecf2321d8 (patch)
tree     3896df97237dd9df2d08b8984f90f8d88d332c54 /linux-x86_64
parent   c0dedc036b11ab0067f7ceb2e6d9105eec098f89 (diff)
download boringssl-4d1ac508237e73d6596202e4fa36d93ecf2321d8.tar.gz
external/boringssl: Sync to b2ff2623a88a65fd4db42d3820f3d8c64e8ab180.
This includes the following changes:
https://boringssl.googlesource.com/boringssl/+log/6d50f475e319de153a43e1dba5a1beca95948c63..b2ff2623a88a65fd4db42d3820f3d8c64e8ab180

Change-Id: I649281e093369d99e863b4882a2ff6a5ad8a64d1
Test: ATP's cts/libcore/gce-net (go/gce-net)
Diffstat (limited to 'linux-x86_64')
-rw-r--r--  linux-x86_64/crypto/bn/x86_64-mont5.S                  |   5
-rw-r--r--  linux-x86_64/crypto/cipher/chacha20_poly1305_x86_64.S  | 280
2 files changed, 141 insertions, 144 deletions
diff --git a/linux-x86_64/crypto/bn/x86_64-mont5.S b/linux-x86_64/crypto/bn/x86_64-mont5.S
index 554df1ff..5d7502c3 100644
--- a/linux-x86_64/crypto/bn/x86_64-mont5.S
+++ b/linux-x86_64/crypto/bn/x86_64-mont5.S
@@ -1826,6 +1826,7 @@ __bn_sqr8x_reduction:
.align 32
.L8x_tail_done:
+ xorq %rax,%rax
addq (%rdx),%r8
adcq $0,%r9
adcq $0,%r10
@@ -1834,9 +1835,7 @@ __bn_sqr8x_reduction:
adcq $0,%r13
adcq $0,%r14
adcq $0,%r15
-
-
- xorq %rax,%rax
+ adcq $0,%rax
negq %rsi
.L8x_no_tail:
diff --git a/linux-x86_64/crypto/cipher/chacha20_poly1305_x86_64.S b/linux-x86_64/crypto/cipher/chacha20_poly1305_x86_64.S
index 889c535c..241d7d02 100644
--- a/linux-x86_64/crypto/cipher/chacha20_poly1305_x86_64.S
+++ b/linux-x86_64/crypto/cipher/chacha20_poly1305_x86_64.S
@@ -61,7 +61,7 @@ poly_fast_tls_ad:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -74,7 +74,7 @@ poly_fast_tls_ad:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -108,7 +108,7 @@ hash_ad_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -121,7 +121,7 @@ hash_ad_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -140,7 +140,7 @@ hash_ad_loop:
adcq %r9,%r11
adcq $0,%r12
- leaq (1*16)(%rcx),%rcx
+ leaq 16(%rcx),%rcx
subq $16,%r8
jmp hash_ad_loop
hash_ad_tail:
@@ -170,7 +170,7 @@ hash_ad_tail_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -183,7 +183,7 @@ hash_ad_tail_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -238,7 +238,6 @@ chacha20_poly1305_open:
.cfi_offset r13, -40
.cfi_offset r14, -48
.cfi_offset r15, -56
-.cfi_offset %r9, -64
leaq 32(%rsp),%rbp
andq $-32,%rbp
movq %rdx,8+32(%rbp)
@@ -406,7 +405,7 @@ open_sse_main_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movdqa .rol8(%rip),%xmm8
@@ -459,7 +458,7 @@ open_sse_main_loop:
pslld $32-25,%xmm4
pxor %xmm8,%xmm4
movdqa 80(%rbp),%xmm8
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
.byte 102,15,58,15,255,4
@@ -594,7 +593,7 @@ open_sse_main_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -607,7 +606,7 @@ open_sse_main_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -727,7 +726,7 @@ open_sse_main_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -740,7 +739,7 @@ open_sse_main_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -845,7 +844,7 @@ open_sse_main_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -858,7 +857,7 @@ open_sse_main_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -1032,7 +1031,7 @@ open_sse_main_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -1045,7 +1044,7 @@ open_sse_main_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -1209,7 +1208,7 @@ open_sse_main_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -1222,7 +1221,7 @@ open_sse_main_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -1253,7 +1252,7 @@ open_sse_main_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -1266,7 +1265,7 @@ open_sse_main_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -1431,7 +1430,7 @@ open_sse_main_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movdqa %xmm9,80(%rbp)
@@ -1510,7 +1509,7 @@ open_sse_main_loop:
.byte 102,15,58,15,237,12
.byte 102,69,15,58,15,201,8
.byte 102,69,15,58,15,237,4
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
paddd %xmm6,%xmm2
@@ -1590,7 +1589,7 @@ open_sse_main_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -1603,7 +1602,7 @@ open_sse_main_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -1742,7 +1741,7 @@ open_sse_tail_16:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -1755,7 +1754,7 @@ open_sse_tail_16:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -1786,7 +1785,7 @@ open_sse_finalize:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -1799,7 +1798,7 @@ open_sse_finalize:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -2042,7 +2041,7 @@ open_sse_128:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -2055,7 +2054,7 @@ open_sse_128:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -2120,7 +2119,6 @@ chacha20_poly1305_seal:
.cfi_offset r13, -40
.cfi_offset r14, -48
.cfi_offset r15, -56
-.cfi_offset %r9, -64
leaq 32(%rsp),%rbp
andq $-32,%rbp
movq %rdx,8+32(%rbp)
@@ -2508,7 +2506,7 @@ chacha20_poly1305_seal:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movdqa .rol8(%rip),%xmm8
@@ -2561,7 +2559,7 @@ chacha20_poly1305_seal:
pslld $32-25,%xmm4
pxor %xmm8,%xmm4
movdqa 80(%rbp),%xmm8
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
.byte 102,15,58,15,255,4
@@ -2697,7 +2695,7 @@ chacha20_poly1305_seal:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -2710,7 +2708,7 @@ chacha20_poly1305_seal:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -2843,7 +2841,7 @@ seal_sse_tail_64:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -2856,7 +2854,7 @@ seal_sse_tail_64:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -2929,7 +2927,7 @@ seal_sse_tail_64:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -2942,7 +2940,7 @@ seal_sse_tail_64:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -3001,7 +2999,7 @@ seal_sse_tail_128:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -3014,7 +3012,7 @@ seal_sse_tail_128:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -3087,7 +3085,7 @@ seal_sse_tail_128:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -3100,7 +3098,7 @@ seal_sse_tail_128:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -3224,7 +3222,7 @@ seal_sse_tail_192:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -3237,7 +3235,7 @@ seal_sse_tail_192:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -3331,7 +3329,7 @@ seal_sse_tail_192:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -3344,7 +3342,7 @@ seal_sse_tail_192:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -3485,7 +3483,7 @@ seal_sse_128_seal_hash:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -3498,7 +3496,7 @@ seal_sse_128_seal_hash:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -3542,7 +3540,7 @@ seal_sse_128_seal:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -3555,7 +3553,7 @@ seal_sse_128_seal:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -3627,7 +3625,7 @@ seal_sse_tail_16:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -3640,7 +3638,7 @@ seal_sse_tail_16:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -3670,7 +3668,7 @@ seal_sse_finalize:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -3683,7 +3681,7 @@ seal_sse_finalize:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -3997,7 +3995,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -4010,7 +4008,7 @@ chacha20_poly1305_open_avx2:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -4085,7 +4083,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
vpshufb %ymm8,%ymm15,%ymm15
@@ -4103,7 +4101,7 @@ chacha20_poly1305_open_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
vpxor %ymm11,%ymm7,%ymm7
vpxor %ymm10,%ymm6,%ymm6
vpxor %ymm9,%ymm5,%ymm5
@@ -4167,7 +4165,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
vmovdqa %ymm8,128(%rbp)
@@ -4194,7 +4192,7 @@ chacha20_poly1305_open_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
vpalignr $8,%ymm10,%ymm10,%ymm10
vpalignr $12,%ymm14,%ymm14,%ymm14
vpalignr $4,%ymm5,%ymm5,%ymm5
@@ -4274,7 +4272,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
vpshufb %ymm8,%ymm15,%ymm15
@@ -4291,7 +4289,7 @@ chacha20_poly1305_open_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
vpaddd %ymm12,%ymm8,%ymm8
vpxor %ymm11,%ymm7,%ymm7
vpxor %ymm10,%ymm6,%ymm6
@@ -4385,7 +4383,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -4398,7 +4396,7 @@ chacha20_poly1305_open_avx2:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -4450,7 +4448,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -4463,7 +4461,7 @@ chacha20_poly1305_open_avx2:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -4528,7 +4526,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -4541,7 +4539,7 @@ chacha20_poly1305_open_avx2:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -4648,7 +4646,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rdx
@@ -4657,7 +4655,7 @@ chacha20_poly1305_open_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
addq %rax,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -4795,7 +4793,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rdx
@@ -4804,7 +4802,7 @@ chacha20_poly1305_open_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
addq %rax,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -4894,7 +4892,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rdx
@@ -4903,7 +4901,7 @@ chacha20_poly1305_open_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
addq %rax,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -4991,7 +4989,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -5004,7 +5002,7 @@ chacha20_poly1305_open_avx2:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -5102,7 +5100,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rdx
@@ -5111,7 +5109,7 @@ chacha20_poly1305_open_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
addq %rax,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -5215,7 +5213,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -5228,7 +5226,7 @@ chacha20_poly1305_open_avx2:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -5293,7 +5291,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rdx
@@ -5302,7 +5300,7 @@ chacha20_poly1305_open_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
addq %rax,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -5375,7 +5373,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rdx
@@ -5384,7 +5382,7 @@ chacha20_poly1305_open_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
addq %rax,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -5506,7 +5504,7 @@ chacha20_poly1305_open_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rdx
@@ -5515,7 +5513,7 @@ chacha20_poly1305_open_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
addq %rax,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -5758,7 +5756,7 @@ open_avx2_hash_and_xor_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -5771,7 +5769,7 @@ open_avx2_hash_and_xor_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -5799,7 +5797,7 @@ open_avx2_hash_and_xor_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -5812,7 +5810,7 @@ open_avx2_hash_and_xor_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -5862,7 +5860,7 @@ open_avx2_short_tail_32:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -5875,7 +5873,7 @@ open_avx2_short_tail_32:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -6632,7 +6630,7 @@ chacha20_poly1305_seal_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
vpshufb %ymm8,%ymm15,%ymm15
@@ -6650,7 +6648,7 @@ chacha20_poly1305_seal_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
vpxor %ymm11,%ymm7,%ymm7
vpxor %ymm10,%ymm6,%ymm6
vpxor %ymm9,%ymm5,%ymm5
@@ -6716,7 +6714,7 @@ chacha20_poly1305_seal_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
vmovdqa %ymm8,128(%rbp)
@@ -6743,7 +6741,7 @@ chacha20_poly1305_seal_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
vpalignr $8,%ymm10,%ymm10,%ymm10
vpalignr $12,%ymm14,%ymm14,%ymm14
vpalignr $4,%ymm5,%ymm5,%ymm5
@@ -6823,7 +6821,7 @@ chacha20_poly1305_seal_avx2:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
vpshufb %ymm8,%ymm15,%ymm15
@@ -6840,7 +6838,7 @@ chacha20_poly1305_seal_avx2:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
vpaddd %ymm12,%ymm8,%ymm8
vpxor %ymm11,%ymm7,%ymm7
vpxor %ymm10,%ymm6,%ymm6
@@ -6935,7 +6933,7 @@ chacha20_poly1305_seal_avx2:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -6948,7 +6946,7 @@ chacha20_poly1305_seal_avx2:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7000,7 +6998,7 @@ chacha20_poly1305_seal_avx2:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7013,7 +7011,7 @@ chacha20_poly1305_seal_avx2:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7058,7 +7056,7 @@ chacha20_poly1305_seal_avx2:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7071,7 +7069,7 @@ chacha20_poly1305_seal_avx2:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7099,7 +7097,7 @@ chacha20_poly1305_seal_avx2:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7112,7 +7110,7 @@ chacha20_poly1305_seal_avx2:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7156,7 +7154,7 @@ seal_avx2_tail_128:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7169,7 +7167,7 @@ seal_avx2_tail_128:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7219,7 +7217,7 @@ seal_avx2_tail_128:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7232,7 +7230,7 @@ seal_avx2_tail_128:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7279,7 +7277,7 @@ seal_avx2_tail_128:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7292,7 +7290,7 @@ seal_avx2_tail_128:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7355,7 +7353,7 @@ seal_avx2_tail_256:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7368,7 +7366,7 @@ seal_avx2_tail_256:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7437,7 +7435,7 @@ seal_avx2_tail_256:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7450,7 +7448,7 @@ seal_avx2_tail_256:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7516,7 +7514,7 @@ seal_avx2_tail_256:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7529,7 +7527,7 @@ seal_avx2_tail_256:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7616,7 +7614,7 @@ seal_avx2_tail_384:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7629,7 +7627,7 @@ seal_avx2_tail_384:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7698,7 +7696,7 @@ seal_avx2_tail_384:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7711,7 +7709,7 @@ seal_avx2_tail_384:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7777,7 +7775,7 @@ seal_avx2_tail_384:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -7790,7 +7788,7 @@ seal_avx2_tail_384:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -7930,7 +7928,7 @@ seal_avx2_tail_512:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rdx
@@ -7939,7 +7937,7 @@ seal_avx2_tail_512:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
addq %rax,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -8007,7 +8005,7 @@ seal_avx2_tail_512:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
vpaddd %ymm4,%ymm0,%ymm0
@@ -8036,7 +8034,7 @@ seal_avx2_tail_512:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
vpslld $32-25,%ymm7,%ymm7
vpxor %ymm8,%ymm7,%ymm7
vpsrld $25,%ymm6,%ymm8
@@ -8141,7 +8139,7 @@ seal_avx2_tail_512:
movq %rdx,%r15
mulxq %r10,%r13,%r14
mulxq %r11,%rax,%rdx
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
vpxor %ymm9,%ymm5,%ymm5
@@ -8170,7 +8168,7 @@ seal_avx2_tail_512:
mulxq %r11,%r11,%r9
adcq %r11,%r15
adcq $0,%r9
- imul %r12,%rdx
+ imulq %r12,%rdx
vpalignr $8,%ymm10,%ymm10,%ymm10
vpalignr $4,%ymm14,%ymm14,%ymm14
vpalignr $12,%ymm5,%ymm5,%ymm5
@@ -8590,7 +8588,7 @@ seal_avx2_hash:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -8603,7 +8601,7 @@ seal_avx2_hash:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -8644,7 +8642,7 @@ seal_avx2_short_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -8657,7 +8655,7 @@ seal_avx2_short_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -8685,7 +8683,7 @@ seal_avx2_short_loop:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -8698,7 +8696,7 @@ seal_avx2_short_loop:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10
@@ -8746,7 +8744,7 @@ seal_avx2_short_tail:
movq %rdx,%r14
movq 0+0(%rbp),%rax
mulq %r11
- imul %r12,%r15
+ imulq %r12,%r15
addq %rax,%r14
adcq %rdx,%r15
movq 8+0(%rbp),%rax
@@ -8759,7 +8757,7 @@ seal_avx2_short_tail:
mulq %r11
addq %rax,%r15
adcq $0,%rdx
- imul %r12,%r9
+ imulq %r12,%r9
addq %r10,%r15
adcq %rdx,%r9
movq %r13,%r10