// This file is generated from a similarly-named Perl script in the BoringSSL
// source tree. Do not edit by hand.
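//
// ARM64 field and multiprecision arithmetic for SIKEp434
// (p434 = 2^216 * 3^137 - 1). Field elements are seven 64-bit limbs;
// double-width intermediates are fourteen. Pointer arguments arrive in
// x0-x2 per the AAPCS64 calling convention. (Descriptive comments below
// marked "illustrative" are inferred from the code, not from the
// generating script.)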
#if !defined(__has_feature)
#define __has_feature(x) 0
#endif
#if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
#define OPENSSL_NO_ASM
#endif
#if !defined(OPENSSL_NO_ASM)
#if defined(__aarch64__)
#if defined(BORINGSSL_PREFIX)
#include <boringssl_prefix_symbols_asm.h>
#endif
.section .rodata
# p434 x 2
.Lp434x2:
.quad 0xFFFFFFFFFFFFFFFE, 0xFFFFFFFFFFFFFFFF
.quad 0xFB82ECF5C5FFFFFF, 0xF78CB8F062B15D47
.quad 0xD9F8BFAD038A40AC, 0x0004683E4E2EE688
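# Note: 2*p434 spans seven limbs, but limbs 1 and 2 are both
# 0xFFFFFFFFFFFFFFFF, so only six distinct quads are stored; the code
# reuses the register holding limb 1 for limb 2.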
# p434 + 1
.Lp434p1:
.quad 0xFDC1767AE3000000, 0x7BC65C783158AEA3
.quad 0x6CFC5FD681C52056, 0x0002341F27177344
.text
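// sike_mpmul(a, b, c): multiprecision multiplication c = a * b, where
// a and b are 7-limb (448-bit) operands at x0 and x1 and c is the
// 14-limb product at x2. Implemented with one level of Karatsuba:
// a = AH*2^256 + AL, b = BH*2^256 + BL, with the middle term formed as
// (AH+AL)*(BH+BL) - AL*BL - AH*BH. (Argument names are illustrative;
// operand roles are inferred from the loads and stores below.)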
.globl sike_mpmul
.hidden sike_mpmul
.align 4
sike_mpmul:
stp x29, x30, [sp,#-96]!
add x29, sp, #0
stp x19, x20, [sp,#16]
stp x21, x22, [sp,#32]
stp x23, x24, [sp,#48]
stp x25, x26, [sp,#64]
stp x27, x28, [sp,#80]
ldp x3, x4, [x0]
ldp x5, x6, [x0,#16]
ldp x7, x8, [x0,#32]
ldr x9, [x0,#48]
ldp x10, x11, [x1,#0]
ldp x12, x13, [x1,#16]
ldp x14, x15, [x1,#32]
ldr x16, [x1,#48]
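// One-level Karatsuba split: AL = x3-x6 (limbs 0-3), AH = x7-x9
// (limbs 4-6); likewise BL = x10-x13, BH = x14-x16.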
// x3-x6 <- AH + AL, x7 <- carry
adds x3, x3, x7
adcs x4, x4, x8
adcs x5, x5, x9
adcs x6, x6, xzr
adc x7, xzr, xzr
// x10-x13 <- BH + BL, x8 <- carry
adds x10, x10, x14
adcs x11, x11, x15
adcs x12, x12, x16
adcs x13, x13, xzr
adc x8, xzr, xzr
// x9 <- combined carry
and x9, x7, x8
// x7-x8 <- mask
sub x7, xzr, x7
sub x8, xzr, x8
// x14-x17 <- masked (BH + BL)
and x14, x10, x7
and x15, x11, x7
and x16, x12, x7
and x17, x13, x7
// x20-x23 <- masked (AH + AL)
and x20, x3, x8
and x21, x4, x8
and x22, x5, x8
and x23, x6, x8
// x14-x17, x7 <- masked (AH+AL) + masked (BH+BL), step 1
adds x14, x14, x20
adcs x15, x15, x21
adcs x16, x16, x22
adcs x17, x17, x23
adc x7, x9, xzr
// x8-x9,x19,x20-x24 <- (AH+AL) x (BH+BL), low part
stp x3, x4, [x2,#0]
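// The two 4x4-limb products are themselves computed with a second
// Karatsuba split (2+2 limbs). The A0/A1, B0, C0-C7 and T0/T1 names in
// the comments below appear to be symbolic names from the generating
// Perl template and do not track registers exactly.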
// A0-A1 <- AH + AL, T0 <- mask
adds x3, x3, x5
adcs x4, x4, x6
adc x25, xzr, xzr
// C6, T1 <- BH + BL, C7 <- mask
adds x23, x10, x12
adcs x26, x11, x13
adc x24, xzr, xzr
// C0-C1 <- masked (BH + BL)
sub x19, xzr, x25
sub x20, xzr, x24
and x8, x23, x19
and x9, x26, x19
// C4-C5 <- masked (AH + AL), T0 <- combined carry
and x21, x3, x20
and x22, x4, x20
mul x19, x3, x23
mul x20, x3, x26
and x25, x25, x24
// C0-C1, T0 <- (AH+AL) x (BH+BL), part 1
adds x8, x21, x8
umulh x21, x3, x26
adcs x9, x22, x9
umulh x22, x3, x23
adc x25, x25, xzr
// C2-C5 <- (AH+AL) x (BH+BL), low part
mul x3, x4, x23
umulh x23, x4, x23
adds x20, x20, x22
adc x21, x21, xzr
mul x24, x4, x26
umulh x26, x4, x26
adds x20, x20, x3
adcs x21, x21, x23
adc x22, xzr, xzr
adds x21, x21, x24
adc x22, x22, x26
ldp x3, x4, [x2,#0]
// C2-C5, T0 <- (AH+AL) x (BH+BL), final part
adds x21, x8, x21
umulh x24, x3, x10
umulh x26, x3, x11
adcs x22, x9, x22
mul x8, x3, x10
mul x9, x3, x11
adc x25, x25, xzr
// C0-C1, T1, C7 <- AL x BL
mul x3, x4, x10
umulh x10, x4, x10
adds x9, x9, x24
adc x26, x26, xzr
mul x23, x4, x11
umulh x11, x4, x11
adds x9, x9, x3
adcs x26, x26, x10
adc x24, xzr, xzr
adds x26, x26, x23
adc x24, x24, x11
// C2-C5, T0 <- (AH+AL) x (BH+BL) - ALxBL
mul x3, x5, x12
umulh x10, x5, x12
subs x19, x19, x8
sbcs x20, x20, x9
sbcs x21, x21, x26
mul x4, x5, x13
umulh x23, x5, x13
sbcs x22, x22, x24
sbc x25, x25, xzr
// A0, A1, C6, B0 <- AH x BH
mul x5, x6, x12
umulh x12, x6, x12
adds x4, x4, x10
adc x23, x23, xzr
mul x11, x6, x13
umulh x13, x6, x13
adds x4, x4, x5
adcs x23, x23, x12
adc x10, xzr, xzr
adds x23, x23, x11
adc x10, x10, x13
// C2-C5, T0 <- (AH+AL) x (BH+BL) - ALxBL - AHxBH
subs x19, x19, x3
sbcs x20, x20, x4
sbcs x21, x21, x23
sbcs x22, x22, x10
sbc x25, x25, xzr
adds x19, x19, x26
adcs x20, x20, x24
adcs x21, x21, x3
adcs x22, x22, x4
adcs x23, x25, x23
adc x24, x10, xzr
// x14-x17, x7 <- (AH+AL) x (BH+BL), final step
adds x14, x14, x21
adcs x15, x15, x22
adcs x16, x16, x23
adcs x17, x17, x24
adc x7, x7, xzr
// Load AL
ldp x3, x4, [x0]
ldp x5, x6, [x0,#16]
// Load BL
ldp x10, x11, [x1,#0]
ldp x12, x13, [x1,#16]
// Temporarily store x8, x9 at [x2]
stp x8, x9, [x2,#0]
// x21-x28 <- AL x BL
// A0-A1 <- AH + AL, T0 <- mask
adds x3, x3, x5
adcs x4, x4, x6
adc x8, xzr, xzr
// C6, T1 <- BH + BL, C7 <- mask
adds x27, x10, x12
adcs x9, x11, x13
adc x28, xzr, xzr
// C0-C1 <- masked (BH + BL)
sub x23, xzr, x8
sub x24, xzr, x28
and x21, x27, x23
and x22, x9, x23
// C4-C5 <- masked (AH + AL), T0 <- combined carry
and x25, x3, x24
and x26, x4, x24
mul x23, x3, x27
mul x24, x3, x9
and x8, x8, x28
// C0-C1, T0 <- (AH+AL) x (BH+BL), part 1
adds x21, x25, x21
umulh x25, x3, x9
adcs x22, x26, x22
umulh x26, x3, x27
adc x8, x8, xzr
// C2-C5 <- (AH+AL) x (BH+BL), low part
mul x3, x4, x27
umulh x27, x4, x27
adds x24, x24, x26
adc x25, x25, xzr
mul x28, x4, x9
umulh x9, x4, x9
adds x24, x24, x3
adcs x25, x25, x27
adc x26, xzr, xzr
adds x25, x25, x28
adc x26, x26, x9
ldp x3, x4, [x0,#0]
// C2-C5, T0 <- (AH+AL) x (BH+BL), final part
adds x25, x21, x25
umulh x28, x3, x10
umulh x9, x3, x11
adcs x26, x22, x26
mul x21, x3, x10
mul x22, x3, x11
adc x8, x8, xzr
// C0-C1, T1, C7 <- AL x BL
mul x3, x4, x10
umulh x10, x4, x10
adds x22, x22, x28
adc x9, x9, xzr
mul x27, x4, x11
umulh x11, x4, x11
adds x22, x22, x3
adcs x9, x9, x10
adc x28, xzr, xzr
adds x9, x9, x27
adc x28, x28, x11
// C2-C5, T0 <- (AH+AL) x (BH+BL) - ALxBL
mul x3, x5, x12
umulh x10, x5, x12
subs x23, x23, x21
sbcs x24, x24, x22
sbcs x25, x25, x9
mul x4, x5, x13
umulh x27, x5, x13
sbcs x26, x26, x28
sbc x8, x8, xzr
// A0, A1, C6, B0 <- AH x BH
mul x5, x6, x12
umulh x12, x6, x12
adds x4, x4, x10
adc x27, x27, xzr
mul x11, x6, x13
umulh x13, x6, x13
adds x4, x4, x5
adcs x27, x27, x12
adc x10, xzr, xzr
adds x27, x27, x11
adc x10, x10, x13
// C2-C5, T0 <- (AH+AL) x (BH+BL) - ALxBL - AHxBH
subs x23, x23, x3
sbcs x24, x24, x4
sbcs x25, x25, x27
sbcs x26, x26, x10
sbc x8, x8, xzr
adds x23, x23, x9
adcs x24, x24, x28
adcs x25, x25, x3
adcs x26, x26, x4
adcs x27, x8, x27
adc x28, x10, xzr
// Restore x8, x9
ldp x8, x9, [x2,#0]
// x8-x9,x19,x20,x14-x17 <- masked (AH+AL) x (BH+BL) - ALxBL
subs x8, x8, x21
sbcs x9, x9, x22
sbcs x19, x19, x23
sbcs x20, x20, x24
sbcs x14, x14, x25
sbcs x15, x15, x26
sbcs x16, x16, x27
sbcs x17, x17, x28
sbc x7, x7, xzr
// Store ALxBL, low
stp x21, x22, [x2]
stp x23, x24, [x2,#16]
// Load AH
ldp x3, x4, [x0,#32]
ldr x5, [x0,#48]
// Load BH
ldp x10, x11, [x1,#32]
ldr x12, [x1,#48]
adds x8, x8, x25
adcs x9, x9, x26
adcs x19, x19, x27
adcs x20, x20, x28
adc x1, xzr, xzr
add x0, x0, #32
// Temporarily store x8,x9 in x2
stp x8,x9, [x2,#32]
// x21-x28 <- AH x BH
// A0 * B0
mul x21, x3, x10 // C0
umulh x24, x3, x10
// A0 * B1
mul x22, x3, x11
umulh x23, x3, x11
// A1 * B0
mul x8, x4, x10
umulh x9, x4, x10
adds x22, x22, x24
adc x23, x23, xzr
// A0 * B2
mul x27, x3, x12
umulh x28, x3, x12
adds x22, x22, x8 // C1
adcs x23, x23, x9
adc x24, xzr, xzr
// A2 * B0
mul x8, x5, x10
umulh x25, x5, x10
adds x23, x23, x27
adcs x24, x24, x25
adc x25, xzr, xzr
// A1 * B1
mul x27, x4, x11
umulh x9, x4, x11
adds x23, x23, x8
adcs x24, x24, x28
adc x25, x25, xzr
// A1 * B2
mul x8, x4, x12
umulh x28, x4, x12
adds x23, x23, x27 // C2
adcs x24, x24, x9
adc x25, x25, xzr
// A2 * B1
mul x27, x5, x11
umulh x9, x5, x11
adds x24, x24, x8
adcs x25, x25, x28
adc x26, xzr, xzr
// A2 * B2
mul x8, x5, x12
umulh x28, x5, x12
adds x24, x24, x27 // C3
adcs x25, x25, x9
adc x26, x26, xzr
adds x25, x25, x8 // C4
adc x26, x26, x28 // C5
// Restore x8,x9
ldp x8,x9, [x2,#32]
neg x1, x1
// x8-x9,x19,x20,x14-x17 <- (AH+AL) x (BH+BL) - ALxBL - AHxBH
subs x8, x8, x21
sbcs x9, x9, x22
sbcs x19, x19, x23
sbcs x20, x20, x24
sbcs x14, x14, x25
sbcs x15, x15, x26
sbcs x16, x16, xzr
sbcs x17, x17, xzr
sbc x7, x7, xzr
// Store (AH+AL) x (BH+BL) - ALxBL - AHxBH, low
stp x8, x9, [x2,#32]
stp x19, x20, [x2,#48]
adds x1, x1, #1
adcs x14, x14, x21
adcs x15, x15, x22
adcs x16, x16, x23
adcs x17, x17, x24
adcs x25, x7, x25
adc x26, x26, xzr
stp x14, x15, [x2,#64]
stp x16, x17, [x2,#80]
stp x25, x26, [x2,#96]
ldp x19, x20, [x29,#16]
ldp x21, x22, [x29,#32]
ldp x23, x24, [x29,#48]
ldp x25, x26, [x29,#64]
ldp x27, x28, [x29,#80]
ldp x29, x30, [sp],#96
ret
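// sike_fprdc(a, c): Montgomery-style reduction for p434. Reduces the
// 14-limb (double-width) value at x0 to a 7-limb result at x1 by
// repeatedly multiplying low limbs with p434+1 (.Lp434p1) and
// accumulating. (Argument names are illustrative; behavior inferred
// from the code below.)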
.globl sike_fprdc
.hidden sike_fprdc
.align 4
sike_fprdc:
stp x29, x30, [sp, #-96]!
add x29, sp, #0
stp x19, x20, [sp,#16]
stp x21, x22, [sp,#32]
stp x23, x24, [sp,#48]
stp x25, x26, [sp,#64]
stp x27, x28, [sp,#80]
ldp x2, x3, [x0,#0] // a[0-1]
// Load the constant p434 + 1
adrp x26, .Lp434p1
add x26, x26, :lo12:.Lp434p1
ldp x23, x24, [x26, #0x0]
ldp x25, x26, [x26,#0x10]
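// p434 + 1 = 2^216 * 3^137; its three low limbs are zero, so only
// limbs 3-6 (x23-x26) participate in the multiplications below.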
// a[0-1] * p434+1
mul x4, x2, x23 // C0
umulh x7, x2, x23
mul x5, x2, x24
umulh x6, x2, x24
mul x10, x3, x23
umulh x11, x3, x23
adds x5, x5, x7
adc x6, x6, xzr
mul x27, x2, x25
umulh x28, x2, x25
adds x5, x5, x10 // C1
adcs x6, x6, x11
adc x7, xzr, xzr
mul x10, x3, x24
umulh x11, x3, x24
adds x6, x6, x27
adcs x7, x7, x28
adc x8, xzr, xzr
mul x27, x2, x26
umulh x28, x2, x26
adds x6, x6, x10 // C2
adcs x7, x7, x11
adc x8, x8, xzr
mul x10, x3, x25
umulh x11, x3, x25
adds x7, x7, x27
adcs x8, x8, x28
adc x9, xzr, xzr
mul x27, x3, x26
umulh x28, x3, x26
adds x7, x7, x10 // C3
adcs x8, x8, x11
adc x9, x9, xzr
adds x8, x8, x27 // C4
adc x9, x9, x28 // C5
ldp x10, x11, [x0, #0x18]
ldp x12, x13, [x0, #0x28]
ldp x14, x15, [x0, #0x38]
ldp x16, x17, [x0, #0x48]
ldp x19, x20, [x0, #0x58]
ldr x21, [x0, #0x68]
adds x10, x10, x4
adcs x11, x11, x5
adcs x12, x12, x6
adcs x13, x13, x7
adcs x14, x14, x8
adcs x15, x15, x9
adcs x22, x16, xzr
adcs x17, x17, xzr
adcs x19, x19, xzr
adcs x20, x20, xzr
adc x21, x21, xzr
ldr x2, [x0,#0x10] // a[2]
// a[2-3] * p434+1
mul x4, x2, x23 // C0
umulh x7, x2, x23
mul x5, x2, x24
umulh x6, x2, x24
mul x0, x10, x23
umulh x3, x10, x23
adds x5, x5, x7
adc x6, x6, xzr
mul x27, x2, x25
umulh x28, x2, x25
adds x5, x5, x0 // C1
adcs x6, x6, x3
adc x7, xzr, xzr
mul x0, x10, x24
umulh x3, x10, x24
adds x6, x6, x27
adcs x7, x7, x28
adc x8, xzr, xzr
mul x27, x2, x26
umulh x28, x2, x26
adds x6, x6, x0 // C2
adcs x7, x7, x3
adc x8, x8, xzr
mul x0, x10, x25
umulh x3, x10, x25
adds x7, x7, x27
adcs x8, x8, x28
adc x9, xzr, xzr
mul x27, x10, x26
umulh x28, x10, x26
adds x7, x7, x0 // C3
adcs x8, x8, x3
adc x9, x9, xzr
adds x8, x8, x27 // C4
adc x9, x9, x28 // C5
adds x12, x12, x4
adcs x13, x13, x5
adcs x14, x14, x6
adcs x15, x15, x7
adcs x16, x22, x8
adcs x17, x17, x9
adcs x22, x19, xzr
adcs x20, x20, xzr
adc x21, x21, xzr
mul x4, x11, x23 // C0
umulh x7, x11, x23
mul x5, x11, x24
umulh x6, x11, x24
mul x10, x12, x23
umulh x3, x12, x23
adds x5, x5, x7
adc x6, x6, xzr
mul x27, x11, x25
umulh x28, x11, x25
adds x5, x5, x10 // C1
adcs x6, x6, x3
adc x7, xzr, xzr
mul x10, x12, x24
umulh x3, x12, x24
adds x6, x6, x27
adcs x7, x7, x28
adc x8, xzr, xzr
mul x27, x11, x26
umulh x28, x11, x26
adds x6, x6, x10 // C2
adcs x7, x7, x3
adc x8, x8, xzr
mul x10, x12, x25
umulh x3, x12, x25
adds x7, x7, x27
adcs x8, x8, x28
adc x9, xzr, xzr
mul x27, x12, x26
umulh x28, x12, x26
adds x7, x7, x10 // C3
adcs x8, x8, x3
adc x9, x9, xzr
adds x8, x8, x27 // C4
adc x9, x9, x28 // C5
adds x14, x14, x4
adcs x15, x15, x5
adcs x16, x16, x6
adcs x17, x17, x7
adcs x19, x22, x8
adcs x20, x20, x9
adc x22, x21, xzr
stp x14, x15, [x1, #0x0] // C0, C1
mul x4, x13, x23 // C0
umulh x10, x13, x23
mul x5, x13, x24
umulh x27, x13, x24
adds x5, x5, x10 // C1
adc x10, xzr, xzr
mul x6, x13, x25
umulh x28, x13, x25
adds x27, x10, x27
adcs x6, x6, x27 // C2
adc x10, xzr, xzr
mul x7, x13, x26
umulh x8, x13, x26
adds x28, x10, x28
adcs x7, x7, x28 // C3
adc x8, x8, xzr // C4
adds x16, x16, x4
adcs x17, x17, x5
adcs x19, x19, x6
adcs x20, x20, x7
adc x21, x22, x8
str x16, [x1, #0x10]
stp x17, x19, [x1, #0x18]
stp x20, x21, [x1, #0x28]
ldp x19, x20, [x29,#16]
ldp x21, x22, [x29,#32]
ldp x23, x24, [x29,#48]
ldp x25, x26, [x29,#64]
ldp x27, x28, [x29,#80]
ldp x29, x30, [sp],#96
ret
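// sike_fpadd(a, b, c): field addition c = a + b mod 2*p434 at x2,
// branchless. The correction step is, in C-like pseudocode (added here
// for readability only):
//   c = a + b;
//   borrow = sub(c, c, 2*p434);  // try the reduction
//   mask = 0 - borrow;           // all-ones if c went negative
//   add(c, c, 2*p434 & mask);    // undo it when it underflowed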
.globl sike_fpadd
.hidden sike_fpadd
.align 4
sike_fpadd:
stp x29,x30, [sp,#-16]!
add x29, sp, #0
ldp x3, x4, [x0,#0]
ldp x5, x6, [x0,#16]
ldp x7, x8, [x0,#32]
ldr x9, [x0,#48]
ldp x11, x12, [x1,#0]
ldp x13, x14, [x1,#16]
ldp x15, x16, [x1,#32]
ldr x17, [x1,#48]
// Add a + b
adds x3, x3, x11
adcs x4, x4, x12
adcs x5, x5, x13
adcs x6, x6, x14
adcs x7, x7, x15
adcs x8, x8, x16
adc x9, x9, x17
// Subtract 2xp434
adrp x17, .Lp434x2
add x17, x17, :lo12:.Lp434x2
ldp x11, x12, [x17, #0]
ldp x13, x14, [x17, #16]
ldp x15, x16, [x17, #32]
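// Limb 2 of 2*p434 equals limb 1 (both all-ones), so x12 is used for
// both positions below.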
subs x3, x3, x11
sbcs x4, x4, x12
sbcs x5, x5, x12
sbcs x6, x6, x13
sbcs x7, x7, x14
sbcs x8, x8, x15
sbcs x9, x9, x16
sbc x0, xzr, xzr // x0 can be reused now
// Add 2xp434 ANDed with the mask in x0
and x11, x11, x0
and x12, x12, x0
and x13, x13, x0
and x14, x14, x0
and x15, x15, x0
and x16, x16, x0
adds x3, x3, x11
adcs x4, x4, x12
adcs x5, x5, x12
adcs x6, x6, x13
adcs x7, x7, x14
adcs x8, x8, x15
adc x9, x9, x16
stp x3, x4, [x2,#0]
stp x5, x6, [x2,#16]
stp x7, x8, [x2,#32]
str x9, [x2,#48]
ldp x29, x30, [sp],#16
ret
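// sike_fpsub(a, b, c): field subtraction c = a - b mod 2*p434,
// branchless: the borrow of a - b forms a mask in x0 that decides
// whether 2*p434 is added back. (Argument names are illustrative.)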
.globl sike_fpsub
.hidden sike_fpsub
.align 4
sike_fpsub:
stp x29, x30, [sp,#-16]!
add x29, sp, #0
ldp x3, x4, [x0,#0]
ldp x5, x6, [x0,#16]
ldp x7, x8, [x0,#32]
ldr x9, [x0,#48]
ldp x11, x12, [x1,#0]
ldp x13, x14, [x1,#16]
ldp x15, x16, [x1,#32]
ldr x17, [x1,#48]
// Subtract a - b
subs x3, x3, x11
sbcs x4, x4, x12
sbcs x5, x5, x13
sbcs x6, x6, x14
sbcs x7, x7, x15
sbcs x8, x8, x16
sbcs x9, x9, x17
sbc x0, xzr, xzr
// Add 2xp434 ANDed with the mask in x0
adrp x17, .Lp434x2
add x17, x17, :lo12:.Lp434x2
ldp x11, x12, [x17, #0]
ldp x13, x14, [x17, #16]
ldp x15, x16, [x17, #32]
and x11, x11, x0
and x12, x12, x0
and x13, x13, x0
and x14, x14, x0
and x15, x15, x0
and x16, x16, x0
adds x3, x3, x11
adcs x4, x4, x12
adcs x5, x5, x12
adcs x6, x6, x13
adcs x7, x7, x14
adcs x8, x8, x15
adc x9, x9, x16
stp x3, x4, [x2,#0]
stp x5, x6, [x2,#16]
stp x7, x8, [x2,#32]
str x9, [x2,#48]
ldp x29, x30, [sp],#16
ret
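// sike_mpadd_asm(a, b, c): plain 7-limb addition c = a + b with no
// modular reduction; the carry out of the top limb is discarded.
// (Argument names are illustrative.)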
.globl sike_mpadd_asm
.hidden sike_mpadd_asm
.align 4
sike_mpadd_asm:
stp x29, x30, [sp,#-16]!
add x29, sp, #0
ldp x3, x4, [x0,#0]
ldp x5, x6, [x0,#16]
ldp x7, x8, [x0,#32]
ldr x9, [x0,#48]
ldp x11, x12, [x1,#0]
ldp x13, x14, [x1,#16]
ldp x15, x16, [x1,#32]
ldr x17, [x1,#48]
adds x3, x3, x11
adcs x4, x4, x12
adcs x5, x5, x13
adcs x6, x6, x14
adcs x7, x7, x15
adcs x8, x8, x16
adc x9, x9, x17
stp x3, x4, [x2,#0]
stp x5, x6, [x2,#16]
stp x7, x8, [x2,#32]
str x9, [x2,#48]
ldp x29, x30, [sp],#16
ret
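// sike_mpsubx2_asm(a, b, c): double-width (14-limb) subtraction
// c = a - b; the final borrow is expanded to a mask (0 or all-ones)
// and left in x0, the AAPCS64 return register. (Argument names are
// illustrative.)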
.globl sike_mpsubx2_asm
.hidden sike_mpsubx2_asm
.align 4
sike_mpsubx2_asm:
stp x29, x30, [sp,#-16]!
add x29, sp, #0
ldp x3, x4, [x0,#0]
ldp x5, x6, [x0,#16]
ldp x11, x12, [x1,#0]
ldp x13, x14, [x1,#16]
subs x3, x3, x11
sbcs x4, x4, x12
sbcs x5, x5, x13
sbcs x6, x6, x14
ldp x7, x8, [x0,#32]
ldp x9, x10, [x0,#48]
ldp x11, x12, [x1,#32]
ldp x13, x14, [x1,#48]
sbcs x7, x7, x11
sbcs x8, x8, x12
sbcs x9, x9, x13
sbcs x10, x10, x14
stp x3, x4, [x2,#0]
stp x5, x6, [x2,#16]
stp x7, x8, [x2,#32]
stp x9, x10, [x2,#48]
ldp x3, x4, [x0,#64]
ldp x5, x6, [x0,#80]
ldp x11, x12, [x1,#64]
ldp x13, x14, [x1,#80]
sbcs x3, x3, x11
sbcs x4, x4, x12
sbcs x5, x5, x13
sbcs x6, x6, x14
ldp x7, x8, [x0,#96]
ldp x11, x12, [x1,#96]
sbcs x7, x7, x11
sbcs x8, x8, x12
sbc x0, xzr, xzr
stp x3, x4, [x2,#64]
stp x5, x6, [x2,#80]
stp x7, x8, [x2,#96]
ldp x29, x30, [sp],#16
ret
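// sike_mpdblsubx2_asm(a, b, c): double-width computation c = c - a - b
// over 14 limbs, processed in blocks; borrows out of each block are
// tallied in x9 and subtracted from the first limb of the next block.
// (Argument names are illustrative.)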
.globl sike_mpdblsubx2_asm
.hidden sike_mpdblsubx2_asm
.align 4
sike_mpdblsubx2_asm:
stp x29, x30, [sp, #-16]!
add x29, sp, #0
ldp x3, x4, [x2, #0]
ldp x5, x6, [x2,#16]
ldp x7, x8, [x2,#32]
ldp x11, x12, [x0, #0]
ldp x13, x14, [x0,#16]
ldp x15, x16, [x0,#32]
subs x3, x3, x11
sbcs x4, x4, x12
sbcs x5, x5, x13
sbcs x6, x6, x14
sbcs x7, x7, x15
sbcs x8, x8, x16
// x9 stores carry
adc x9, xzr, xzr
ldp x11, x12, [x1, #0]
ldp x13, x14, [x1,#16]
ldp x15, x16, [x1,#32]
subs x3, x3, x11
sbcs x4, x4, x12
sbcs x5, x5, x13
sbcs x6, x6, x14
sbcs x7, x7, x15
sbcs x8, x8, x16
adc x9, x9, xzr
stp x3, x4, [x2, #0]
stp x5, x6, [x2,#16]
stp x7, x8, [x2,#32]
ldp x3, x4, [x2,#48]
ldp x5, x6, [x2,#64]
ldp x7, x8, [x2,#80]
ldp x11, x12, [x0,#48]
ldp x13, x14, [x0,#64]
ldp x15, x16, [x0,#80]
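// x9 counted the subtraction chains above that did NOT borrow (0, 1,
// or 2), so 2 - x9 is the total number of borrows to propagate into
// this limb block.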
// x9 = 2 - x9
neg x9, x9
add x9, x9, #2
subs x3, x3, x9
sbcs x3, x3, x11
sbcs x4, x4, x12
sbcs x5, x5, x13
sbcs x6, x6, x14
sbcs x7, x7, x15
sbcs x8, x8, x16
adc x9, xzr, xzr
ldp x11, x12, [x1,#48]
ldp x13, x14, [x1,#64]
ldp x15, x16, [x1,#80]
subs x3, x3, x11
sbcs x4, x4, x12
sbcs x5, x5, x13
sbcs x6, x6, x14
sbcs x7, x7, x15
sbcs x8, x8, x16
adc x9, x9, xzr
stp x3, x4, [x2,#48]
stp x5, x6, [x2,#64]
stp x7, x8, [x2,#80]
ldp x3, x4, [x2,#96]
ldp x11, x12, [x0,#96]
ldp x13, x14, [x1,#96]
// x9 = 2 - x9
neg x9, x9
add x9, x9, #2
subs x3, x3, x9
sbcs x3, x3, x11
sbcs x4, x4, x12
subs x3, x3, x13
sbc x4, x4, x14
stp x3, x4, [x2,#96]
ldp x29, x30, [sp],#16
ret
.section .note.GNU-stack,"",%progbits
#endif
#endif // !OPENSSL_NO_ASM