Lines matching refs:tfm_entry (sketches reconstructing the surrounding code follow the listing)
134 * @tfm_entry: per-cpu pointer to one entry in TFM list
150 struct tipc_tfm * __percpu *tfm_entry;
403 struct tipc_tfm *tfm_entry, *head, *tmp;
408 head = *get_cpu_ptr(aead->tfm_entry);
409 put_cpu_ptr(aead->tfm_entry);
410 list_for_each_entry_safe(tfm_entry, tmp, &head->list, list) {
411 crypto_free_aead(tfm_entry->tfm);
412 list_del(&tfm_entry->list);
413 kfree(tfm_entry);
420 free_percpu(aead->tfm_entry);
483 struct tipc_tfm **tfm_entry;
486 tfm_entry = get_cpu_ptr(aead->tfm_entry);
487 *tfm_entry = list_next_entry(*tfm_entry, list);
488 tfm = (*tfm_entry)->tfm;
489 put_cpu_ptr(tfm_entry);
510 struct tipc_tfm *tfm_entry, *head;
528 tmp->tfm_entry = alloc_percpu(struct tipc_tfm *);
529 if (!tmp->tfm_entry) {
556 tfm_entry = kmalloc(sizeof(*tfm_entry), GFP_KERNEL);
557 if (unlikely(!tfm_entry)) {
562 INIT_LIST_HEAD(&tfm_entry->list);
563 tfm_entry->tfm = tfm;
567 head = tfm_entry;
569 *per_cpu_ptr(tmp->tfm_entry, cpu) = head;
572 list_add_tail(&tfm_entry->list, &head->list);
579 free_percpu(tmp->tfm_entry);
637 aead->tfm_entry = alloc_percpu_gfp(struct tipc_tfm *, GFP_ATOMIC);
638 if (unlikely(!aead->tfm_entry)) {
644 *per_cpu_ptr(aead->tfm_entry, cpu) =
645 *per_cpu_ptr(src->tfm_entry, cpu);
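Read together, the matches above describe a per-cpu cursor into a circular list of pre-allocated AEAD transforms: lines 134 and 150 declare the cursor field, and lines 483-489 advance it one step in round-robin fashion so each CPU can grab the next TFM without locking. The sketch below reconstructs the two types and the lookup from those fragments; struct names, any members not visible in the matches, and the function name are assumptions. The later sketches reuse these includes and type definitions.

#include <crypto/aead.h>
#include <linux/err.h>
#include <linux/list.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/types.h>

/* Each entry wraps one AEAD transform and links into a circular list
 * shared by all CPUs (reconstructed from lines 411-412 and 562-563).
 */
struct tipc_tfm {
	struct crypto_aead *tfm;
	struct list_head list;
};

struct tipc_aead {
	/* ... other members not shown in the matches ... */
	struct tipc_tfm * __percpu *tfm_entry;	/* per-cpu cursor into the list */
};

/* Lines 483-489: advance this CPU's cursor one step around the circular
 * list and return the transform it now points at (function name assumed).
 */
static struct crypto_aead *tipc_aead_tfm_next(struct tipc_aead *aead)
{
	struct tipc_tfm **tfm_entry;
	struct crypto_aead *tfm;

	tfm_entry = get_cpu_ptr(aead->tfm_entry);	/* pins this CPU */
	*tfm_entry = list_next_entry(*tfm_entry, list);
	tfm = (*tfm_entry)->tfm;
	put_cpu_ptr(tfm_entry);

	return tfm;
}

Because get_cpu_ptr() disables preemption, the cursor update and the read of the entry's tfm stay on one CPU, and since the list itself is never modified here, cursors on other CPUs can walk it concurrently.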
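The matches at lines 510-579 show how that list is built: one per-cpu cursor array is allocated, each allocated transform is wrapped in a kmalloc'ed entry, the first entry becomes the head that every CPU's cursor starts at, and later entries are appended to its tail. A minimal sketch, assuming the helper name, its parameters, the key setup and the error unwinding (none of which are visible in the matches); only the list/per-cpu plumbing mirrors the matched lines.

static int tipc_tfm_list_build(struct tipc_aead *tmp, const char *alg_name,
			       const u8 *key, unsigned int keylen,
			       unsigned int tfm_cnt)
{
	struct tipc_tfm *tfm_entry, *head = NULL;
	struct crypto_aead *tfm;
	unsigned int i;
	int cpu, err;

	/* One cursor per CPU; each will point into the shared list. */
	tmp->tfm_entry = alloc_percpu(struct tipc_tfm *);
	if (!tmp->tfm_entry)
		return -ENOMEM;

	for (i = 0; i < tfm_cnt; i++) {
		tfm = crypto_alloc_aead(alg_name, 0, 0);
		if (IS_ERR(tfm)) {
			err = PTR_ERR(tfm);
			goto err_out;
		}
		err = crypto_aead_setkey(tfm, key, keylen);
		if (err) {
			crypto_free_aead(tfm);
			goto err_out;
		}

		tfm_entry = kmalloc(sizeof(*tfm_entry), GFP_KERNEL);
		if (unlikely(!tfm_entry)) {
			crypto_free_aead(tfm);
			err = -ENOMEM;
			goto err_out;
		}
		INIT_LIST_HEAD(&tfm_entry->list);
		tfm_entry->tfm = tfm;

		if (!head) {
			/* The first entry doubles as the list head; point
			 * every CPU's cursor at it (lines 567-569).
			 */
			head = tfm_entry;
			for_each_possible_cpu(cpu)
				*per_cpu_ptr(tmp->tfm_entry, cpu) = head;
		} else {
			list_add_tail(&tfm_entry->list, &head->list);
		}
	}
	return 0;

err_out:
	/* Assumed cleanup: a partially built list is torn down by the caller
	 * (see the teardown sketch below); only drop the cursor array here
	 * if nothing was linked in yet.
	 */
	if (!head)
		free_percpu(tmp->tfm_entry);
	return err;
}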
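The matches at lines 637-645 show the clone path: a clone allocates only its own per-cpu cursor array, with GFP_ATOMIC as in the listing, and copies each CPU's current cursor from the source, so both instances share one underlying TFM list rather than duplicating it. A sketch under the same assumptions; the helper name and return convention are not in the matches.

static int tipc_tfm_cursors_clone(struct tipc_aead *aead, struct tipc_aead *src)
{
	int cpu;

	aead->tfm_entry = alloc_percpu_gfp(struct tipc_tfm *, GFP_ATOMIC);
	if (unlikely(!aead->tfm_entry))
		return -ENOMEM;

	/* Start each CPU's cursor where the source's cursor currently is;
	 * the list entries themselves are not copied.
	 */
	for_each_possible_cpu(cpu)
		*per_cpu_ptr(aead->tfm_entry, cpu) =
			*per_cpu_ptr(src->tfm_entry, cpu);

	return 0;
}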
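The matches at lines 403-420 show the teardown for the instance that owns the list: any CPU's cursor reaches the head, every other entry and its transform are freed, and finally the per-cpu cursor array is released. Note that the walk over &head->list visits every entry except the head itself, so the head's own release after the loop is an assumption here (those lines do not mention tfm_entry and are not in the listing); a shared-list clone would have to skip the walk and release only its cursor array.

static void tipc_tfm_list_free(struct tipc_aead *aead)
{
	struct tipc_tfm *tfm_entry, *head, *tmp;

	/* Any CPU's cursor reaches the list; the head is the entry whose
	 * list member serves as the list head (lines 408-409).
	 */
	head = *get_cpu_ptr(aead->tfm_entry);
	put_cpu_ptr(aead->tfm_entry);

	/* Free every non-head entry and its AEAD transform (lines 410-413). */
	list_for_each_entry_safe(tfm_entry, tmp, &head->list, list) {
		crypto_free_aead(tfm_entry->tfm);
		list_del(&tfm_entry->list);
		kfree(tfm_entry);
	}

	/* Assumed: release the head entry last, then the per-cpu cursors
	 * (the free_percpu() itself is line 420).
	 */
	crypto_free_aead(head->tfm);
	kfree(head);
	free_percpu(aead->tfm_entry);
}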