Lines Matching refs:darg

158 struct tls_decrypt_arg *darg)
167 char content_type = darg->zc ? darg->tail : 0;
258 struct tls_decrypt_arg *darg)
271 if (darg->async) {
295 darg->async_done = true;
297 darg->async = false;
302 darg->async = false;
1444 * They must transform the darg in/out argument as follows:
1451 * If ZC decryption was performed, darg.skb will point to the input skb.
1455 * or in skb buffers itself. The input parameter 'darg->zc' indicates if
1459 * zero-copy gets disabled and 'darg->zc' is updated.
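The matches above touch only a handful of fields: zc, async, async_done, tail and skb, and the call sites at 2024, 2200 and 2286 clear a darg.inargs group with memset(), which suggests the input flags sit inside a struct_group(). A sketch of a structure consistent with those references follows; the ordering, comments and exact grouping are reconstructions, not a copy of the definition in the source file.

#include <linux/types.h>	/* bool, u8 */
#include <linux/stddef.h>	/* struct_group() */
#include <linux/skbuff.h>	/* struct sk_buff */

/* Sketch only: field set inferred from the references listed above. */
struct tls_decrypt_arg {
	struct_group(inargs,
	bool zc;		/* in: zero-copy allowed; out: zero-copy performed */
	bool async;		/* in: async decrypt allowed; out: async crypto in flight */
	bool async_done;	/* out: async completion already ran (see 295, 1601) */
	u8 tail;		/* out: trailing content-type byte for TLS 1.3 (see 167, 1620) */
	);
	struct sk_buff *skb;	/* out: skb holding the decrypted record (see 1606, 1694) */
};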
1463 struct tls_decrypt_arg *darg)
1487 if (darg->zc && (out_iov || out_sg)) {
1496 darg->zc = false;
1599 data_len + prot->tail_size, aead_req, darg);
1601 if (darg->async_done)
1606 darg->skb = clear_skb ?: tls_strp_msg(ctx);
1609 if (unlikely(darg->async)) {
1612 __skb_queue_tail(&ctx->async_hold, darg->skb);
1616 if (unlikely(darg->async_done))
1620 darg->tail = dctx->tail;
1635 struct msghdr *msg, struct tls_decrypt_arg *darg)
1642 err = tls_decrypt_sg(sk, &msg->msg_iter, NULL, darg);
1651 if (unlikely(darg->zc && prot->version == TLS_1_3_VERSION &&
1652 darg->tail != TLS_RECORD_TYPE_DATA)) {
1653 darg->zc = false;
1654 if (!darg->tail)
1657 return tls_decrypt_sw(sk, tls_ctx, msg, darg);
1660 pad = tls_padding_length(prot, darg->skb, darg);
1662 if (darg->skb != tls_strp_msg(ctx))
1663 consume_skb(darg->skb);
1667 rxm = strp_msg(darg->skb);
1675 struct tls_context *tls_ctx, struct tls_decrypt_arg *darg)
1689 pad = tls_padding_length(prot, tls_strp_msg(ctx), darg);
1693 darg->async = false;
1694 darg->skb = tls_strp_msg(ctx);
1696 darg->zc &= !(prot->version == TLS_1_3_VERSION &&
1697 tls_msg(darg->skb)->control != TLS_RECORD_TYPE_DATA);
1699 rxm = strp_msg(darg->skb);
1702 if (!darg->zc) {
1704 darg->skb = tls_strp_msg_detach(ctx);
1705 if (!darg->skb)
1716 err = skb_copy_datagram_msg(darg->skb, off, msg, len);
1724 struct tls_decrypt_arg *darg)
1731 err = tls_decrypt_device(sk, msg, tls_ctx, darg);
1733 err = tls_decrypt_sw(sk, tls_ctx, msg, darg);
1737 rxm = strp_msg(darg->skb);
1747 struct tls_decrypt_arg darg = { .zc = true, };
1749 return tls_decrypt_sg(sk, NULL, sgout, &darg);
2006 struct tls_decrypt_arg darg;
2024 memset(&darg.inargs, 0, sizeof(darg.inargs));
2033 darg.zc = true;
2037 darg.async = ctx->async_capable;
2039 darg.async = false;
2041 err = tls_rx_one_record(sk, msg, &darg);
2047 async |= darg.async;
2056 err = tls_record_content_type(msg, tls_msg(darg.skb), &control);
2058 DEBUG_NET_WARN_ON_ONCE(darg.zc);
2061 __skb_queue_tail(&ctx->rx_list, darg.skb);
2071 rxm = strp_msg(darg.skb);
2075 if (!darg.zc) {
2077 struct sk_buff *skb = darg.skb;
2079 DEBUG_NET_WARN_ON_ONCE(darg.skb == ctx->strp.anchor);
2193 struct tls_decrypt_arg darg;
2200 memset(&darg.inargs, 0, sizeof(darg.inargs));
2202 err = tls_rx_one_record(sk, NULL, &darg);
2209 skb = darg.skb;
2280 struct tls_decrypt_arg darg;
2286 memset(&darg.inargs, 0, sizeof(darg.inargs));
2288 err = tls_rx_one_record(sk, NULL, &darg);
2297 skb = darg.skb;
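The three call sites at 2006, 2193 and 2280 all follow the same shape: zero the input group, set the allowed decrypt modes, call tls_rx_one_record(), then consume darg.skb. A minimal sketch of that pattern, assuming the layout sketched earlier; the wrapper function, its parameters and the return value are illustrative, and only tls_rx_one_record(), strp_msg(), ctx->async_capable and the darg fields come from the matches above.

/* Illustrative caller pattern only; not a copy of the kernel function
 * that contains it (headers as in the surrounding translation unit).
 */
static int rx_one_record_sketch(struct sock *sk, struct msghdr *msg,
				struct tls_sw_context_rx *ctx, bool zc_ok)
{
	struct tls_decrypt_arg darg;
	struct strp_msg *rxm;
	int err;

	memset(&darg.inargs, 0, sizeof(darg.inargs));	/* clear the input flags */
	darg.zc = zc_ok;				/* zero-copy into msg allowed */
	darg.async = ctx->async_capable;		/* async crypto allowed */

	err = tls_rx_one_record(sk, msg, &darg);
	if (err < 0)
		return err;

	/* On return the darg fields report what actually happened: darg.zc
	 * says whether the data already landed in msg, darg.async whether
	 * crypto is still in flight, and darg.skb holds the record.
	 */
	rxm = strp_msg(darg.skb);
	return rxm->full_len;
}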