Cross-reference hits for the identifier "tctx" across the bundled Linux kernel source trees, grouped by directory.

/kernel/linux/linux-6.6/io_uring/
tctx.c
    tctx here is the per-task io_uring context, struct io_uring_task * (reached via tsk->io_uring or current->io_uring).
    References: #include "tctx.h" (13); __io_uring_free() (49-56: WARN_ON_ONCE on xa, io_wq and cached_refs, percpu_counter_destroy(&tctx->inflight), kfree(tctx)); io_uring_alloc_task_context() (63, 66: kzalloc(sizeof(*tctx), GFP_KERNEL)); __io_uring_add_tctx_node() (96); io_uring_del_tctx_node() (157); io_uring_clean_tctx() (178, argument); io_uring_unreg_ringfd() (200); io_ring_add_registered_file() (211, argument); io_ring_add_registered_fd() (226, argument); io_ringfd_register() (258); io_ringfd_unregister() (316).
cancel.c
    tctx (struct io_uring_task *) is threaded through the cancelation paths so pending work can be canceled on tctx->io_wq.
    References: #include "tctx.h" (15); io_async_cancel_one() (75-86: argument; returns early unless tctx && tctx->io_wq, then io_wq_cancel_cb(tctx->io_wq, io_cancel_cb, cd, all)); io_try_cancel() (102-110: argument; WARN_ON_ONCE(!io_wq_current_is_worker() && tctx != current->io_uring) before calling io_async_cancel_one(tctx, cd)); __io_async_cancel() (156-178: argument, also node->task->io_uring at 178); io_async_cancel() (201: req->task->io_uring); __io_sync_cancel() (238, argument).
tctx.h
    Declares io_uring_clean_tctx(struct io_uring_task *tctx) (14). The inline io_uring_add_tctx_node() (27-29) reads tctx = current->io_uring and takes the fast path when likely(tctx && tctx->last == ctx); a sketch of the overall lifecycle follows this group.
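The three entries above describe one object from allocation to teardown, so a condensed sketch may help. It is a hedged model, not the kernel code: the _sketch names and the reduced field set stand in for the real struct io_uring_task, and node registration, locking and error paths are left out.

/*
 * Condensed sketch of the per-task context lifecycle the three io_uring
 * files above share. The _sketch struct keeps only the fields the hits
 * reference; xarray node handling, locking and error paths are omitted.
 */
#include <linux/bug.h>
#include <linux/errno.h>
#include <linux/percpu_counter.h>
#include <linux/slab.h>
#include <linux/xarray.h>

struct io_uring_task_sketch {
        struct xarray xa;               /* per-ring nodes registered by this task */
        void *io_wq;                    /* async worker pool, if one was created */
        void *last;                     /* most recently used ring ctx (fast path) */
        unsigned int cached_refs;
        struct percpu_counter inflight;
};

static int io_uring_alloc_task_context_sketch(struct io_uring_task_sketch **out)
{
        struct io_uring_task_sketch *tctx = kzalloc(sizeof(*tctx), GFP_KERNEL);

        if (!tctx)
                return -ENOMEM;
        if (percpu_counter_init(&tctx->inflight, 0, GFP_KERNEL)) {
                kfree(tctx);
                return -ENOMEM;
        }
        xa_init(&tctx->xa);
        *out = tctx;
        return 0;
}

/* Fast path from tctx.h: skip the xarray lookup when the task last
 * submitted to this same ring. */
static bool io_uring_ctx_is_cached_sketch(struct io_uring_task_sketch *tctx,
                                          void *ctx)
{
        return tctx && tctx->last == ctx;
}

static void io_uring_free_task_context_sketch(struct io_uring_task_sketch *tctx)
{
        /* By teardown time every ring node, the worker pool and all cached
         * request refs must already be gone. */
        WARN_ON_ONCE(!xa_empty(&tctx->xa));
        WARN_ON_ONCE(tctx->io_wq);
        WARN_ON_ONCE(tctx->cached_refs);
        percpu_counter_destroy(&tctx->inflight);
        kfree(tctx);
}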
/kernel/linux/linux-6.6/crypto/
hctr2.c
    tctx is struct hctr2_tfm_ctx * (crypto_skcipher_ctx); it holds the polyval shash plus the offsets of the pre-hashed tweak-length/tweak state stored behind the context.
    References: hctr2_hashed_tweaklen() (83-89: argument; p = (u8 *)tctx + sizeof(*tctx), advanced by crypto_shash_statesize(tctx->polyval)); hctr2_hashed_tweak() (93-96: argument; (u8 *)rctx + tctx->hashed_tweak_offset); hctr2_hash_tweaklen() (109-126: argument; runs tctx->polyval and exports the state with crypto_shash_export()); hctr2_setkey() (132); hctr2_hash_tweak() (173); hctr2_finish() (232); hctr2_crypt() (268); hctr2_init_tfm() (338); hctr2_exit_tfm() (387).
essiv.c
    tctx is struct essiv_tfm_ctx *; it wraps the inner skcipher/aead (tctx->u), the hash used to derive the salt (tctx->hash), and the ESSIV block cipher (tctx->essiv_cipher).
    References: essiv_skcipher_setkey() (69-90: forwards the key to tctx->u.skcipher, derives the salt with crypto_shash_tfm_digest(tctx->hash, key, keylen, salt), then crypto_cipher_setkey(tctx->essiv_cipher, salt, crypto_shash_digestsize(tctx->hash))); essiv_aead_setkey() (96); essiv_aead_setauthsize() (129); essiv_skcipher_crypt() (144); essiv_aead_crypt() (186); essiv_init_tfm() (270, argument); essiv_skcipher_init_tfm() (301); essiv_aead_init_tfm() (326); essiv_skcipher_exit_tfm() (357); essiv_aead_exit_tfm() (366).
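The setkey flow summarized above is compact enough to sketch. This is a hedged model rather than the template itself: struct essiv_tfm_ctx_sketch keeps only the three handles that flow touches, and the AEAD variant plus request-flag propagation are omitted.

/*
 * Hedged sketch of the essiv_skcipher_setkey() flow the hits above show.
 * struct essiv_tfm_ctx_sketch is reduced to the three handles that flow
 * touches; the real template lives in crypto/essiv.c.
 */
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <crypto/internal/cipher.h>
#include <linux/crypto.h>

struct essiv_tfm_ctx_sketch {
        struct crypto_skcipher *skcipher;       /* inner cipher, e.g. cbc(aes) */
        struct crypto_shash *hash;              /* used to derive the salt */
        struct crypto_cipher *essiv_cipher;     /* encrypts the per-sector IV */
};

static int essiv_setkey_sketch(struct essiv_tfm_ctx_sketch *tctx,
                               const u8 *key, unsigned int keylen)
{
        u8 salt[HASH_MAX_DIGESTSIZE];
        int err;

        /* 1. The user key goes unchanged to the inner skcipher. */
        err = crypto_skcipher_setkey(tctx->skcipher, key, keylen);
        if (err)
                return err;

        /* 2. The ESSIV salt is a digest of that key. */
        err = crypto_shash_tfm_digest(tctx->hash, key, keylen, salt);
        if (err)
                return err;

        /* 3. The salt keys the block cipher that turns sector numbers
         *    into unpredictable IVs. */
        return crypto_cipher_setkey(tctx->essiv_cipher, salt,
                                    crypto_shash_digestsize(tctx->hash));
}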
adiantum.c
    tctx is struct adiantum_tfm_ctx *; it holds the stream cipher (tctx->streamcipher), the single-block cipher (tctx->blockcipher), and a Poly1305 hash key derived during setkey.
    References: adiantum_setkey() (120-171: keys tctx->streamcipher, allocates a request sized by crypto_skcipher_reqsize(tctx->streamcipher) to expand subkeys, then keys tctx->blockcipher and calls poly1305_core_setkey() on the derived hash key); adiantum_hash_header() (222); adiantum_hash_message() (251); adiantum_finish() (285); adiantum_crypt() (324); adiantum_init_tfm() (400); adiantum_exit_tfm() (450).
vmac.c
    tctx is struct vmac_tfm_ctx *; it caches the underlying block cipher handle plus the NH and polynomial keys derived from it.
    References: vhash_blocks() (399-405: argument; reads tctx->nhkey and tctx->polykey[0..1]); vmac_setkey() (433-451: keys tctx->cipher, then fills tctx->nhkey[] by encrypting counter blocks with crypto_cipher_encrypt_one() and byte-swapping the halves); vmac_init() (482); vmac_update() (494); vhash_final() (535, argument); vmac_final() (564); vmac_init_tfm() (601); vmac_exit_tfm() (614).
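A hedged sketch of that subkey derivation, based only on the loop visible in the vmac_setkey() hits; the _sketch struct, the array size and the counter layout are simplified stand-ins for the driver's definitions.

/*
 * Hedged sketch of the subkey derivation visible in the vmac_setkey()
 * hits: each encrypted counter block yields two big-endian 64-bit NH
 * subkeys. Sizes and the exact counter layout are simplified here.
 */
#include <crypto/internal/cipher.h>
#include <linux/kernel.h>
#include <asm/byteorder.h>

struct vmac_tfm_ctx_sketch {
        struct crypto_cipher *cipher;   /* underlying 128-bit block cipher */
        u64 nhkey[8];
};

static void vmac_derive_nhkey_sketch(struct vmac_tfm_ctx_sketch *tctx)
{
        __be64 out[2];
        u8 in[16] = { 0 };      /* counter block fed to the cipher */
        unsigned int i;

        for (i = 0; i < ARRAY_SIZE(tctx->nhkey); i += 2) {
                crypto_cipher_encrypt_one(tctx->cipher, (u8 *)out, in);
                tctx->nhkey[i]     = be64_to_cpu(out[0]);
                tctx->nhkey[i + 1] = be64_to_cpu(out[1]);
                in[15]++;       /* next counter block */
        }
}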
xxhash_generic.c
    tctx is struct xxhash64_tfm_ctx *; the only per-transform state is the 64-bit seed installed by setkey.
    References: xxhash64_setkey() (23-27: rejects keys that are not sizeof(tctx->seed) bytes, then tctx->seed = get_unaligned_le64(key)); xxhash64_init() (33-36: xxh64_reset(&dctx->xxhstate, tctx->seed)); xxhash64_digest() (63-65: put_unaligned_le64(xxh64(data, length, tctx->seed), out)).
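This file is the clearest illustration of a split that recurs throughout this listing: crypto_shash_ctx() returns the long-lived per-transform context (tctx, here just the seed), while shash_desc_ctx() returns the per-request streaming state (dctx). A minimal sketch, with _sketch structs standing in for the driver's private types:

#include <crypto/internal/hash.h>
#include <linux/errno.h>
#include <linux/xxhash.h>
#include <asm/unaligned.h>

struct xxhash64_tfm_ctx_sketch { u64 seed; };
struct xxhash64_desc_ctx_sketch { struct xxh64_state xxhstate; };

static int xxhash64_setkey_sketch(struct crypto_shash *tfm,
                                  const u8 *key, unsigned int keylen)
{
        struct xxhash64_tfm_ctx_sketch *tctx = crypto_shash_ctx(tfm);

        if (keylen != sizeof(tctx->seed))
                return -EINVAL;
        tctx->seed = get_unaligned_le64(key);   /* seed arrives little-endian */
        return 0;
}

static int xxhash64_init_sketch(struct shash_desc *desc)
{
        struct xxhash64_tfm_ctx_sketch *tctx = crypto_shash_ctx(desc->tfm);
        struct xxhash64_desc_ctx_sketch *dctx = shash_desc_ctx(desc);

        xxh64_reset(&dctx->xxhstate, tctx->seed);       /* seed each new request */
        return 0;
}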
/kernel/linux/linux-5.10/drivers/crypto/
geode-aes.c
    tctx is struct geode_aes_tfm_ctx *; it stores the raw key and keylen for the Geode AES engine plus fallback cipher/skcipher handles for key sizes the hardware cannot take.
    References: geode_aes_crypt() (69-96: argument; programs the key via _writefield(AES_WRITEKEY0_REG, tctx->key)); geode_setkey_cip() (112-132: records tctx->keylen, copies hardware-supported keys into tctx->key, otherwise mirrors the request flags onto tctx->fallback.cip and keys it); geode_setkey_skcipher() (138-140); geode_encrypt() (165); geode_decrypt() (180); fallback_init_cip() (194); fallback_exit_cip() (209); geode_init_skcipher() (240); geode_exit_skcipher() (257); geode_skcipher_crypt() (265).
/kernel/linux/linux-6.6/drivers/crypto/
geode-aes.c
    Same driver as the 5.10 copy above, with the hits shifted by one line.
    References: geode_aes_crypt() (70-97, argument); geode_setkey_cip() (113-133); geode_setkey_skcipher() (139-141); geode_encrypt() (166); geode_decrypt() (181); fallback_init_cip() (195); fallback_exit_cip() (210); geode_init_skcipher() (241); geode_exit_skcipher() (258); geode_skcipher_crypt() (266).
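The setkey logic shared by both copies is a common hardware-with-fallback pattern, sketched below under stated assumptions: the _sketch struct and its fallback_cip field are reduced stand-ins for the driver's context, and the flag mirroring uses the crypto_cipher_*_flags() helpers rather than poking crt_flags directly as the driver does.

/*
 * Hedged sketch of the hardware-with-fallback setkey: 128-bit keys stay
 * in the tfm context for the engine, anything else goes to a software
 * fallback cipher after the request flags are mirrored.
 */
#include <crypto/aes.h>
#include <crypto/internal/cipher.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/string.h>

struct geode_aes_tfm_ctx_sketch {
        u8 key[AES_KEYSIZE_128];
        unsigned int keylen;
        struct crypto_cipher *fallback_cip;
};

static int geode_setkey_cip_sketch(struct crypto_tfm *tfm, const u8 *key,
                                   unsigned int len)
{
        struct geode_aes_tfm_ctx_sketch *tctx = crypto_tfm_ctx(tfm);

        tctx->keylen = len;

        if (len == AES_KEYSIZE_128) {
                /* Hardware path: keep the key so the request path can
                 * program it into the engine's key registers. */
                memcpy(tctx->key, key, len);
                return 0;
        }

        if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256)
                return -EINVAL;

        /* Software path: mirror the caller's request flags, then hand the
         * key to the fallback cipher. */
        crypto_cipher_clear_flags(tctx->fallback_cip, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(tctx->fallback_cip,
                                crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_MASK);
        return crypto_cipher_setkey(tctx->fallback_cip, key, len);
}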
/kernel/linux/linux-5.10/crypto/
essiv.c
    Same template as the 6.6 copy above (struct essiv_tfm_ctx * with inner skcipher/aead, hash, and essiv_cipher), with the hits shifted by one line.
    References: essiv_skcipher_setkey() (68-89); essiv_aead_setkey() (95); essiv_aead_setauthsize() (128); essiv_skcipher_crypt() (143); essiv_aead_crypt() (185); essiv_init_tfm() (269, argument); essiv_skcipher_init_tfm() (300); essiv_aead_init_tfm() (325); essiv_skcipher_exit_tfm() (356); essiv_aead_exit_tfm() (365).
tgr192.c
    Here tctx is struct tgr192_ctx *, the Tiger-192 digest state obtained from shash_desc_ctx(), so it is per-request state despite the tctx name.
    References: tgr192_transform() (462-490: argument; loads and stores tctx->a/b/c around the round function); tgr192_init() (495-498: seeds tctx->a with 0x0123456789abcdefULL and the companion constants for b and c); tgr192_update() (512); tgr192_final() (552).
adiantum.c
    Same template as the 6.6 copy above (struct adiantum_tfm_ctx * with stream cipher, block cipher, and Poly1305 hash key).
    References: adiantum_setkey() (119-170); adiantum_hash_header() (221); adiantum_hash_message() (250); adiantum_finish() (284); adiantum_crypt() (324); adiantum_init_tfm() (400); adiantum_exit_tfm() (450).
vmac.c
    Same layout as the 6.6 copy above (struct vmac_tfm_ctx * with cipher handle, nhkey and polykey).
    References: vhash_blocks() (398-404, argument); vmac_setkey() (432-450); vmac_init() (481); vmac_update() (493); vhash_final() (534, argument); vmac_final() (563); vmac_init_tfm() (600); vmac_exit_tfm() (613).
xxhash_generic.c
    Identical to the 6.6 copy above: the seed lives in struct xxhash64_tfm_ctx and is used by xxhash64_setkey() (23-27), xxhash64_init() (33-36) and xxhash64_digest() (63-65).
blake2b_generic.c
    tctx is struct blake2b_tfm_ctx *; it stores the optional key and its length.
    References: blake2b_setkey() (150-156: copies the key into tctx->key and records tctx->keylen); blake2b_init() (163-178: folds the key length into the parameter word, state->h[0] ^= 0x01010000 | tctx->keylen << 8 | digestsize, and for keyed hashing preloads state->buf with tctx->key).
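The keyed-hash initialisation described above is worth a short sketch. It is a simplified model: the _sketch struct and block-size constant stand in for the generic driver's types, and IV loading plus the unkeyed buffer handling are omitted.

/*
 * Sketch of keyed BLAKE2b initialisation: the key length is folded into
 * the parameter word and, when a key is present, the zero-padded key
 * becomes the first compressed block.
 */
#include <linux/string.h>
#include <linux/types.h>

#define BLAKE2B_BLOCK_SIZE_SKETCH 128

struct blake2b_state_sketch {
        u64 h[8];
        u8 buf[BLAKE2B_BLOCK_SIZE_SKETCH];
        size_t buflen;
};

static void blake2b_init_sketch(struct blake2b_state_sketch *state,
                                const u8 *key, unsigned int keylen,
                                unsigned int digestsize)
{
        /* Parameter word: digest size, key length, fanout=1, depth=1. */
        state->h[0] ^= 0x01010000 | keylen << 8 | digestsize;

        if (keylen) {
                /* Keyed mode: the zero-padded key is hashed as block 0. */
                memset(state->buf, 0, BLAKE2B_BLOCK_SIZE_SKETCH);
                memcpy(state->buf, key, keylen);
                state->buflen = BLAKE2B_BLOCK_SIZE_SKETCH;
        }
}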
/kernel/linux/linux-6.6/drivers/crypto/intel/keembay/
keembay-ocs-ecc.c
    tctx is struct ocs_ecc_ctx * (kpp_tfm_ctx); it caches the OCS ECC device, the selected NIST curve, and the private key.
    References: kmb_ocs_ecc_find_dev() (203-216: lazily binds tctx->ecc_dev to the first entry of ocs_ecc.dev_list); kmb_ocs_ecdh_set_secret() (538-558: validates params.key_size against digits_to_bytes(tctx->curve->g.ndigits), can generate a key via kmb_ecc_gen_privkey(tctx->curve, tctx->private_key), then checks it with kmb_ecc_is_key_valid()); kmb_ecc_do_shared_secret() (575, argument); kmb_ecc_do_public_key() (658, argument); kmb_ocs_ecc_do_one_request() (709); kmb_ocs_ecdh_generate_public_key() (725); kmb_ocs_ecdh_compute_shared_secret() (750); kmb_ecc_tctx_init() (776, argument); kmb_ocs_ecdh_nist_p256_init_tfm() (797); kmb_ocs_ecdh_nist_p384_init_tfm() (804); kmb_ocs_ecdh_exit_tfm() (811); kmb_ocs_ecdh_max_size() (818).
keembay-ocs-aes-core.c
    tctx is struct ocs_aes_tctx *; it caches the OCS AES device, the key material (key, key_len, cipher), and per-transform DMA state for both the skcipher and AEAD paths.
    References: kmb_ocs_aes_find_dev() (119-132: lazily binds tctx->aes_dev); save_key() (165-176: copies the key into tctx->key and records tctx->key_len and tctx->cipher); kmb_ocs_sk_set_key() (185-188); kmb_ocs_aead_set_key() (207); kmb_ocs_sk_common() (315); kmb_ocs_sk_dma_cleanup() (378); kmb_ocs_sk_prepare_inplace() (402); kmb_ocs_sk_prepare_notinplace() (451); kmb_ocs_sk_run() (518); kmb_ocs_aead_common() (640); kmb_ocs_aead_dma_cleanup() (684); kmb_ocs_aead_dma_prepare() (721); kmb_ocs_aead_run() (889); kmb_ocs_aes_sk_do_one_request() (951); kmb_ocs_aes_aead_do_one_request() (977); ocs_aes_init_tfm() (1151); clear_key() (1173, argument); ocs_exit_tfm() (1185); ocs_aes_aead_cra_init() (1198); ocs_aead_cra_exit() (1248).
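The find-device-then-cache-key pattern above recurs in several drivers in this listing (see also rk3288_crypto_ahash.c and mtk-sha.c below), so here is a hedged sketch of it; the _sketch types, list head and mutex are illustrative stand-ins, not the driver's symbols.

/*
 * Sketch of lazy device binding and key caching in a tfm context.
 * DMA setup and cipher-mode bookkeeping are omitted.
 */
#include <linux/errno.h>
#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/string.h>
#include <linux/types.h>

static LIST_HEAD(ocs_dev_list_sketch);
static DEFINE_MUTEX(ocs_dev_lock_sketch);

struct ocs_aes_dev_sketch { struct list_head list; };

struct ocs_aes_tctx_sketch {
        struct ocs_aes_dev_sketch *aes_dev;
        u8 key[32];
        size_t key_len;
};

static struct ocs_aes_dev_sketch *
kmb_ocs_aes_find_dev_sketch(struct ocs_aes_tctx_sketch *tctx)
{
        mutex_lock(&ocs_dev_lock_sketch);
        /* Bind once; later requests from this tfm reuse the same device. */
        if (!tctx->aes_dev)
                tctx->aes_dev = list_first_entry_or_null(&ocs_dev_list_sketch,
                                        struct ocs_aes_dev_sketch, list);
        mutex_unlock(&ocs_dev_lock_sketch);
        return tctx->aes_dev;
}

static int save_key_sketch(struct ocs_aes_tctx_sketch *tctx,
                           const u8 *in_key, size_t key_len)
{
        if (key_len > sizeof(tctx->key))
                return -EINVAL;
        /* The key stays in the tfm context and is programmed per request. */
        memcpy(tctx->key, in_key, key_len);
        tctx->key_len = key_len;
        return 0;
}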
/kernel/linux/linux-5.10/drivers/crypto/rockchip/
rk3288_crypto_ahash.c
    tctx is struct rk_ahash_ctx *; it mainly carries the pointer to the rk_crypto_info device used for register setup and DMA mapping.
    References: rk_ahash_reg_init() (80-81: dev = tctx->dev); rk_ahash_digest() (198-199); rk_hash_prepare() (223-226: dma_map_sg() on tctx->dev->dev); rk_hash_unprepare() (240-242: matching dma_unmap_sg()); rk_hash_run() (251-277: reinitializes a completion reached through tctx); rk_cra_hash_init() (318); rk_cra_hash_exit() (349).
/kernel/linux/linux-6.6/arch/arm64/crypto/
polyval-ce-glue.c
    tctx is struct polyval_tfm_ctx *; it holds a table of key powers so multiple blocks can be folded per call.
    References: polyval_arm64_setkey() (75-86: stores the key in tctx->key_powers[NUM_KEY_POWERS-1] and derives the lower entries with internal_polyval_mul()); polyval_arm64_update() (105-128: multiplies buffered data by the highest key power and hands tctx plus full blocks to internal_polyval_update()); polyval_arm64_final() (146-150).
/kernel/linux/linux-6.6/arch/x86/crypto/
polyval-clmulni_glue.c
    Same layout as the arm64 glue above (struct polyval_tfm_ctx * with a key_powers table).
    References: polyval_x86_setkey() (84-95); polyval_x86_update() (114-137); polyval_x86_final() (155-159).
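Both glue files precompute a table of key powers at setkey time so update() can fold several blocks per call. A sketch of that precomputation follows; the _SKETCH constants and internal_polyval_mul_sketch() are stand-ins for the glue code's sizes and its accelerated GF(2^128) multiply.

/*
 * Key-power precomputation: the last table slot holds h itself and each
 * earlier slot multiplies in one more h, so slot i ends up holding
 * h^(NUM_KEY_POWERS - i).
 */
#include <linux/string.h>
#include <linux/types.h>

#define POLYVAL_BLOCK_SIZE_SKETCH 16
#define NUM_KEY_POWERS_SKETCH 8

struct polyval_tfm_ctx_sketch {
        u8 key_powers[NUM_KEY_POWERS_SKETCH][POLYVAL_BLOCK_SIZE_SKETCH];
};

void internal_polyval_mul_sketch(u8 *op1, const u8 *op2);       /* op1 *= op2 */

static void polyval_precompute_sketch(struct polyval_tfm_ctx_sketch *tctx,
                                      const u8 *key)
{
        int i;

        /* Highest-index slot: h^1, straight from the user key. */
        memcpy(tctx->key_powers[NUM_KEY_POWERS_SKETCH - 1], key,
               POLYVAL_BLOCK_SIZE_SKETCH);

        for (i = NUM_KEY_POWERS_SKETCH - 2; i >= 0; i--) {
                /* Start from h, then multiply by the next-higher power. */
                memcpy(tctx->key_powers[i], key, POLYVAL_BLOCK_SIZE_SKETCH);
                internal_polyval_mul_sketch(tctx->key_powers[i],
                                            tctx->key_powers[i + 1]);
        }
}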
/kernel/linux/linux-5.10/include/crypto/internal/
blake2s.h
    tctx is struct blake2s_tfm_ctx *; the inline helpers stash the key in the tfm context and replay it at init time.
    References: crypto_blake2s_setkey() (89-95: copies the key into tctx->key and sets tctx->keylen); crypto_blake2s_init() (102-106: __blake2s_init(state, outlen, tctx->key, tctx->keylen)).
/kernel/linux/linux-6.6/include/crypto/internal/
blake2b.h
    Same pattern as blake2s.h above, for BLAKE2b.
    References: crypto_blake2b_setkey() (75-81: copies the key into tctx->key and sets tctx->keylen); crypto_blake2b_init() (88-92: __blake2b_init(state, outlen, tctx->key, tctx->keylen)).
/kernel/linux/linux-6.6/drivers/crypto/aspeed/
aspeed-hace-hash.c
    tctx is struct aspeed_sham_ctx *; it holds the HACE device pointer and, for HMAC, the base key context (tctx->base).
    References: aspeed_ahash_hmac_resume() (352-353: bctx = tctx->base); aspeed_ahash_do_request() (538-539: hace_dev = tctx->hace_dev); aspeed_ahash_prepare_request() (562-563); aspeed_sham_update() (585-586); aspeed_sham_final() (626-627); aspeed_sham_finup() (640); aspeed_sham_init() (665); aspeed_sham_setkey() (754); aspeed_sham_cra_init() (791); aspeed_sham_cra_exit() (821).
/kernel/linux/linux-5.10/drivers/crypto/mediatek/
mtk-sha.c
    tctx is struct mtk_sha_ctx *; it binds the transform to an mtk_cryp device, carries a bounce buffer, and holds the HMAC base context.
    References: mtk_sha_find_dev() (153-173: lazily sets tctx->cryp and records tctx->id = cryp->rec); mtk_sha_finish_hmac() (358-359: bctx = tctx->base); mtk_sha_init() (375-410: points the request buffer at tctx->buf and selects the HMAC path from tctx state); mtk_sha_enqueue() (706); mtk_sha_setkey() (802); mtk_sha_cra_init_alg() (848); mtk_sha_cra_exit() (906).