Lines matching refs: base
45 struct safexcel_context base;
376 struct safexcel_crypto_priv *priv = ctx->base.priv;
384 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
387 ctx->base.needs_inv = true;
407 struct safexcel_crypto_priv *priv = ctx->base.priv;
453 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
457 ctx->base.needs_inv = true;
488 if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
510 struct safexcel_crypto_priv *priv = ctx->base.priv;
667 static int safexcel_send_req(struct crypto_async_request *base, int ring,
674 struct skcipher_request *areq = skcipher_request_cast(base);
676 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
677 struct safexcel_crypto_priv *priv = ctx->base.priv;
702 memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
703 &ctx->base.ipad, ctx->state_sz);
705 memcpy(ctx->base.ctxr->data + (ctx->key_len +
706 ctx->state_sz) / sizeof(u32), &ctx->base.opad,
758 memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
767 1, 1, ctx->base.ctxr_dma,
768 1, 1, ctx->base.ctxr_dma,
790 ctx->base.ctxr_dma, &atoken);
807 safexcel_context_control(ctx, base, sreq, first_cdesc);
873 safexcel_rdr_req_set(priv, ring, first_rdesc, base);
898 struct crypto_async_request *base,
902 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
928 if (ctx->base.exit_inv) {
929 dma_pool_free(priv->context_pool, ctx->base.ctxr,
930 ctx->base.ctxr_dma);
938 ctx->base.ring = ring;
941 enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1001 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1004 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1005 struct safexcel_crypto_priv *priv = ctx->base.priv;
1008 ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1022 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1024 struct safexcel_crypto_priv *priv = ctx->base.priv;
1055 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1057 struct safexcel_crypto_priv *priv = ctx->base.priv;
1074 struct crypto_async_request *base,
1079 struct safexcel_crypto_priv *priv = ctx->base.priv;
1080 int ring = ctx->base.ring;
1084 ctx = crypto_tfm_ctx(base->tfm);
1085 ctx->base.exit_inv = true;
1089 crypto_enqueue_request(&priv->ring[ring].queue, base);
1119 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1134 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1137 static int safexcel_queue_req(struct crypto_async_request *base,
1141 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1142 struct safexcel_crypto_priv *priv = ctx->base.priv;
1148 if (ctx->base.ctxr) {
1149 if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1151 ctx->base.needs_inv = false;
1154 ctx->base.ring = safexcel_select_ring(priv);
1155 ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1156 EIP197_GFP_FLAGS(*base),
1157 &ctx->base.ctxr_dma);
1158 if (!ctx->base.ctxr)
1162 ring = ctx->base.ring;
1165 ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1176 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1182 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1191 alg.skcipher.base);
1196 ctx->base.priv = tmpl->priv;
1198 ctx->base.send = safexcel_skcipher_send;
1199 ctx->base.handle_result = safexcel_skcipher_handle_result;
1212 if (!ctx->base.ctxr)
1215 memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1222 struct safexcel_crypto_priv *priv = ctx->base.priv;
1234 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1235 ctx->base.ctxr_dma);
1242 struct safexcel_crypto_priv *priv = ctx->base.priv;
1254 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1255 ctx->base.ctxr_dma);
1280 .base = {
1318 .base = {
1356 .base = {
1394 .base = {
1416 struct safexcel_crypto_priv *priv = ctx->base.priv;
1429 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1432 ctx->base.needs_inv = true;
1469 .base = {
1490 struct safexcel_crypto_priv *priv = ctx->base.priv;
1498 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1500 ctx->base.needs_inv = true;
1530 .base = {
1568 .base = {
1589 struct safexcel_crypto_priv *priv = ctx->base.priv;
1597 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1599 ctx->base.needs_inv = true;
1629 .base = {
1667 .base = {
1688 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1695 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1703 alg.aead.base);
1708 ctx->base.priv = tmpl->priv;
1716 ctx->base.send = safexcel_aead_send;
1717 ctx->base.handle_result = safexcel_aead_handle_result;
1740 .base = {
1776 .base = {
1812 .base = {
1848 .base = {
1884 .base = {
1921 .base = {
1958 .base = {
1995 .base = {
2032 .base = {
2069 .base = {
2106 .base = {
2143 .base = {
2180 .base = {
2217 .base = {
2254 .base = {
2289 .base = {
2324 .base = {
2359 .base = {
2394 .base = {
2429 .base = {
2451 struct safexcel_crypto_priv *priv = ctx->base.priv;
2467 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2470 ctx->base.needs_inv = true;
2484 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2488 ctx->base.needs_inv = true;
2520 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2528 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2543 .base = {
2565 struct safexcel_crypto_priv *priv = ctx->base.priv;
2576 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2579 ctx->base.needs_inv = true;
2601 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2603 if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2604 ctx->base.needs_inv = true;
2611 ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
2656 .base = {
2678 struct safexcel_crypto_priv *priv = ctx->base.priv;
2688 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2691 ctx->base.needs_inv = true;
2699 ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2757 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2767 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2780 .base = {
2800 struct safexcel_crypto_priv *priv = ctx->base.priv;
2802 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2804 ctx->base.needs_inv = true;
2844 .base = {
2907 return safexcel_queue_req(&req->base, creq, dir);
2930 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2931 req->base.data);
2960 ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
3005 .base = {
3045 .base = {
3069 struct safexcel_crypto_priv *priv = ctx->base.priv;
3074 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3076 ctx->base.needs_inv = true;
3090 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3100 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3125 .base = {
3163 .base = {
3201 .base = {
3239 .base = {
3292 .base = {
3315 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3327 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3352 .base = {
3399 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3400 req->base.data);
3418 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3434 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3462 .base = {
3498 .base = {
3533 .base = {
3602 .base = {
3647 .base = {
3704 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3715 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3739 .base = {
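The "base" member referenced throughout this listing (declared at line 45) is the driver-wide struct safexcel_context embedded in the cipher/AEAD transform context. The sketch below is reconstructed from the fields exercised in the matches above, not copied from the driver headers; the field order, the digest array size and the exact callback signatures are assumptions, and the authoritative definition lives in the driver's safexcel.h.

/* Sketch of the shared context embedded as "base" in struct
 * safexcel_cipher_ctx, reconstructed from the references above.
 */
struct safexcel_context {
	/* per-request hooks installed at init time (lines 1198/1199,
	 * 1716/1717) */
	int (*send)(struct crypto_async_request *req, int ring,
		    int *commands, int *results);
	int (*handle_result)(struct safexcel_crypto_priv *priv, int ring,
			     struct crypto_async_request *req,
			     bool *should_complete, int *ret);

	struct safexcel_crypto_priv *priv;	/* device state (ctx->base.priv) */

	/* per-tfm context record, DMA-mapped for the engine; allocated
	 * lazily from priv->context_pool (lines 1154-1158) and freed on
	 * the exit/invalidation paths (lines 928-930, 1234/1254) */
	struct safexcel_context_record *ctxr;
	dma_addr_t ctxr_dma;

	/* inner/outer HMAC digests and GHASH/XCBC key material copied
	 * into ctxr->data (lines 702-706, 2603-2611, 2699); the array
	 * size here is a guess */
	union {
		__be32 be[32];
		__le32 le[32];
		u32 word[32];
	} ipad, opad;

	int ring;		/* ring this context was last queued on */
	bool needs_inv;		/* TRC entry must be invalidated before reuse */
	bool exit_inv;		/* invalidation issued from the exit path */
};

The allocation and invalidation flow these fields support shows up most compactly in safexcel_queue_req() (references at lines 1137-1165). Filled out as a sketch, with the non-matching lines reconstructed from context (the sreq bookkeeping, locking and workqueue kick are assumptions about the surrounding code):

static int safexcel_queue_req(struct crypto_async_request *base,
			      struct safexcel_cipher_req *sreq,
			      enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		/* Record already exists: if the engine's transform record
		 * cache may hold a stale copy (key changed), have the send
		 * path invalidate it before reuse. */
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		/* First request on this tfm: pick a ring and allocate the
		 * DMA-able context record lazily. */
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}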