/kernel/linux/linux-5.10/lib/crypto/curve25519-hacl64.c

  546  ladder_smallloop_cmult_small_loop_step(u64 *nq, u64 *nqpq, u64 *nq2, u64 *nqpq2,  [in ladder_smallloop_cmult_small_loop_step(), argument]
  551  point_swap_conditional(nq, nqpq, bit0);  [in ladder_smallloop_cmult_small_loop_step()]
  552  addanddouble_fmonty(nq2, nqpq2, nq, nqpq, q);  [in ladder_smallloop_cmult_small_loop_step()]
  558  ladder_smallloop_cmult_small_loop_double_step(u64 *nq, u64 *nqpq, u64 *nq2,  [in ladder_smallloop_cmult_small_loop_double_step(), argument]
  562  ladder_smallloop_cmult_small_loop_step(nq, nqpq, nq2, nqpq2, q, byt);  [in ladder_smallloop_cmult_small_loop_double_step()]
  564  ladder_smallloop_cmult_small_loop_step(nq2, nqpq2, nq, nqpq, q, byt1);  [in ladder_smallloop_cmult_small_loop_double_step()]
  568  ladder_smallloop_cmult_small_loop(u64 *nq, u64 *nqpq, u64 *nq2, u64 *nqpq2,  [in ladder_smallloop_cmult_small_loop(), argument]
  572  ladder_smallloop_cmult_small_loop_double_step(nq, nqpq, nq2,  [in ladder_smallloop_cmult_small_loop()]
  578  ladder_bigloop_cmult_big_loop(u8 *n1, u64 *nq, u64 *nqpq, u64 *nq2, u64 *nqpq2, u64 *q, u32 i)  [in ladder_bigloop_cmult_big_loop(), argument]
  579  u64 *nqpq, u64 *nq2,  [in ladder_bigloop_cmult_big_loop()]
  585  ladder_smallloop_cmult_small_loop(nq, nqpq, nq  [in ladder_bigloop_cmult_big_loop()]
  594  u64 *nqpq = point_buf + 10;  [in ladder_cmult(), local]
  [all ...]
/kernel/linux/linux-6.6/lib/crypto/curve25519-hacl64.c

  544  ladder_smallloop_cmult_small_loop_step(u64 *nq, u64 *nqpq, u64 *nq2, u64 *nqpq2,  [in ladder_smallloop_cmult_small_loop_step(), argument]
  549  point_swap_conditional(nq, nqpq, bit0);  [in ladder_smallloop_cmult_small_loop_step()]
  550  addanddouble_fmonty(nq2, nqpq2, nq, nqpq, q);  [in ladder_smallloop_cmult_small_loop_step()]
  556  ladder_smallloop_cmult_small_loop_double_step(u64 *nq, u64 *nqpq, u64 *nq2,  [in ladder_smallloop_cmult_small_loop_double_step(), argument]
  560  ladder_smallloop_cmult_small_loop_step(nq, nqpq, nq2, nqpq2, q, byt);  [in ladder_smallloop_cmult_small_loop_double_step()]
  562  ladder_smallloop_cmult_small_loop_step(nq2, nqpq2, nq, nqpq, q, byt1);  [in ladder_smallloop_cmult_small_loop_double_step()]
  566  ladder_smallloop_cmult_small_loop(u64 *nq, u64 *nqpq, u64 *nq2, u64 *nqpq2,  [in ladder_smallloop_cmult_small_loop(), argument]
  570  ladder_smallloop_cmult_small_loop_double_step(nq, nqpq, nq2,  [in ladder_smallloop_cmult_small_loop()]
  576  ladder_bigloop_cmult_big_loop(u8 *n1, u64 *nq, u64 *nqpq, u64 *nq2, u64 *nqpq2, u64 *q, u32 i)  [in ladder_bigloop_cmult_big_loop(), argument]
  577  u64 *nqpq, u64 *nq2,  [in ladder_bigloop_cmult_big_loop()]
  583  ladder_smallloop_cmult_small_loop(nq, nqpq, nq  [in ladder_bigloop_cmult_big_loop()]
  592  u64 *nqpq = point_buf + 10;  [in ladder_cmult(), local]
  [all ...]
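Both result sets point at the same Montgomery-ladder scalar multiplication in the HACL*-derived Curve25519 code; only the line numbers differ between the 5.10 and 6.6 trees. As a reading aid, the following is a minimal C sketch of how the matched functions fit together, reconstructed only from the fragments above: the helper routines (point_swap_conditional, addanddouble_fmonty, point_copy), the bit extraction, the loop counts, and the buffer layout beyond "point_buf + 10" are assumptions for illustration, not a verbatim copy of either kernel file.

    #include <stdint.h>

    typedef uint8_t  u8;    /* stand-ins for the kernel's fixed-width typedefs */
    typedef uint64_t u64;

    /* Helpers defined elsewhere in curve25519-hacl64.c; signatures assumed. */
    void point_swap_conditional(u64 *a, u64 *b, u64 swap);
    void addanddouble_fmonty(u64 *nq2, u64 *nqpq2, u64 *nq, u64 *nqpq, u64 *q);
    void point_copy(u64 *dst, u64 *src);

    /* One ladder step: conditionally swap on the current scalar bit, then do the
     * combined add + double into the second pair of buffers. Bit handling assumed. */
    static void ladder_smallloop_cmult_small_loop_step(u64 *nq, u64 *nqpq, u64 *nq2,
                                                       u64 *nqpq2, u64 *q, u8 byt)
    {
        u64 bit = (u64)(byt >> 7);          /* assumed: top bit drives the swap */

        point_swap_conditional(nq, nqpq, bit);
        addanddouble_fmonty(nq2, nqpq2, nq, nqpq, q);
        point_swap_conditional(nq2, nqpq2, bit);
    }

    /* Two steps per call, ping-ponging between the (nq, nqpq) and (nq2, nqpq2)
     * buffer pairs so no intermediate copies are needed. */
    static void ladder_smallloop_cmult_small_loop_double_step(u64 *nq, u64 *nqpq,
                                                              u64 *nq2, u64 *nqpq2,
                                                              u64 *q, u8 byt)
    {
        ladder_smallloop_cmult_small_loop_step(nq, nqpq, nq2, nqpq2, q, byt);
        ladder_smallloop_cmult_small_loop_step(nq2, nqpq2, nq, nqpq, q, (u8)(byt << 1));
    }

    /* Consume one scalar byte, two bits per double step. */
    static void ladder_smallloop_cmult_small_loop(u64 *nq, u64 *nqpq, u64 *nq2,
                                                  u64 *nqpq2, u64 *q, u8 byt, u32 i)
    {
        while (i--) {
            ladder_smallloop_cmult_small_loop_double_step(nq, nqpq, nq2, nqpq2, q, byt);
            byt <<= 2;
        }
    }

    /* Walk the scalar bytes from the top; each byte drives four double steps. */
    static void ladder_bigloop_cmult_big_loop(u8 *n1, u64 *nq, u64 *nqpq, u64 *nq2,
                                              u64 *nqpq2, u64 *q, u32 i)
    {
        while (i--)
            ladder_smallloop_cmult_small_loop(nq, nqpq, nq2, nqpq2, q, n1[i], 4);
    }

    /* Scalar multiplication: four working points share one buffer, which is what
     * produces the "point_buf + 10" hit inside ladder_cmult() above. */
    static void ladder_cmult(u64 *result, u8 *n1, u64 *q)
    {
        u64 point_buf[40] = { 0 };
        u64 *nq    = point_buf;
        u64 *nqpq  = point_buf + 10;        /* matched local */
        u64 *nq2   = point_buf + 20;
        u64 *nqpq2 = point_buf + 30;

        nq[0] = 1;                          /* assumed: nq starts as the neutral element */
        point_copy(nqpq, q);                /* nqpq starts as the input point */
        ladder_bigloop_cmult_big_loop(n1, nq, nqpq, nq2, nqpq2, q, 32);
        point_copy(result, nq);
    }

The ping-pong between the (nq, nqpq) and (nq2, nqpq2) pairs is why every function in the chain takes both pairs as parameters, and that in turn is why a symbol search on these names matches every signature and call site listed above.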