Lines matching references to base (the struct safexcel_context member defined at source line 46) in the safexcel cipher code:

46 	struct safexcel_context base;
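Every match below goes through this one embedded member: the per-tfm cipher/AEAD context begins with a struct safexcel_context named base, so the generic ring code can drive ciphers, AEADs and hashes uniformly. A minimal sketch of that layout, with the field set inferred only from the matches in this listing (the authoritative definitions live in the driver's safexcel.h, so names and order here are approximate):

/* Sketch only: fields inferred from the references below, not a verbatim
 * copy of the driver's safexcel.h. */
struct safexcel_context {
	int (*send)(struct crypto_async_request *req, int ring,
		    int *commands, int *results);
	int (*handle_result)(struct safexcel_crypto_priv *priv, int ring,
			     struct crypto_async_request *req,
			     bool *complete, int *ret);
	struct safexcel_crypto_priv *priv;
	struct safexcel_context_record *ctxr;	/* DMA-able context record */
	dma_addr_t ctxr_dma;			/* its bus address */
	int ring;				/* ring the tfm is bound to */
	bool needs_inv;				/* cached record must be invalidated */
	bool exit_inv;				/* invalidation issued from cra_exit */
	/* ipad/opad also live here for HMAC/GHASH material (lines 709/712/2616) */
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;	/* typically kept first so the generic
					 * code can cast the tfm context */
	/* cipher-specific state (key, key_len, state_sz, ...) follows */
};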
376 struct safexcel_crypto_priv *priv = ctx->base.priv;
384 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
387 ctx->base.needs_inv = true;
407 struct safexcel_crypto_priv *priv = ctx->base.priv;
453 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
457 ctx->base.needs_inv = true;
488 if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
510 struct safexcel_crypto_priv *priv = ctx->base.priv;
673 static int safexcel_send_req(struct crypto_async_request *base, int ring,
680 struct skcipher_request *areq = skcipher_request_cast(base);
682 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
683 struct safexcel_crypto_priv *priv = ctx->base.priv;
708 memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
709 &ctx->base.ipad, ctx->state_sz);
711 memcpy(ctx->base.ctxr->data + (ctx->key_len +
712 ctx->state_sz) / sizeof(u32), &ctx->base.opad,
773 memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
782 1, 1, ctx->base.ctxr_dma,
783 1, 1, ctx->base.ctxr_dma,
805 ctx->base.ctxr_dma, &atoken);
822 safexcel_context_control(ctx, base, sreq, first_cdesc);
888 safexcel_rdr_req_set(priv, ring, first_rdesc, base);
919 struct crypto_async_request *base,
923 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
949 if (ctx->base.exit_inv) {
950 dma_pool_free(priv->context_pool, ctx->base.ctxr,
951 ctx->base.ctxr_dma);
959 ctx->base.ring = ring;
962 enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1022 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1025 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1026 struct safexcel_crypto_priv *priv = ctx->base.priv;
1029 ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
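The matches around lines 919-1029 form the record-invalidation path: when a key changes (or the tfm is torn down) while the EIP197's transform record cache may still hold the old record, the driver queues a special invalidation request instead of reusing the stale entry. A condensed, hedged sketch of that round-trip, built only from the lines above (argument lists are abbreviated compared with the real driver):

/* Hedged sketch of the invalidation round-trip. */
static int sketch_send_inv(struct crypto_async_request *base, int ring)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;

	/* Ask the engine to drop the cached copy of this context record. */
	return safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
}

static void sketch_handle_inv_result(struct safexcel_crypto_priv *priv,
				     int ring,
				     struct crypto_async_request *base)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);

	if (ctx->base.exit_inv) {
		/* Invalidation was triggered by cra_exit: free the record. */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
		return;
	}

	/* Key change: rebind to this ring and requeue the original request. */
	ctx->base.ring = ring;
	crypto_enqueue_request(&priv->ring[ring].queue, base);
}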
1043 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1045 struct safexcel_crypto_priv *priv = ctx->base.priv;
1076 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1078 struct safexcel_crypto_priv *priv = ctx->base.priv;
1095 struct crypto_async_request *base,
1100 struct safexcel_crypto_priv *priv = ctx->base.priv;
1101 int ring = ctx->base.ring;
1104 ctx = crypto_tfm_ctx(base->tfm);
1105 ctx->base.exit_inv = true;
1109 crypto_enqueue_request(&priv->ring[ring].queue, base);
1139 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1154 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1157 static int safexcel_queue_req(struct crypto_async_request *base,
1161 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1162 struct safexcel_crypto_priv *priv = ctx->base.priv;
1168 if (ctx->base.ctxr) {
1169 if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1171 ctx->base.needs_inv = false;
1174 ctx->base.ring = safexcel_select_ring(priv);
1175 ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1176 EIP197_GFP_FLAGS(*base),
1177 &ctx->base.ctxr_dma);
1178 if (!ctx->base.ctxr)
1182 ring = ctx->base.ring;
1185 ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
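Lines 1157-1185 show the lazy allocation that feeds both paths above: the DMA-able context record is only carved out of the pool on the first request for a tfm, and an existing record is flagged for invalidation rather than silently overwritten while the TRC may still cache it. A rough reconstruction from those matches (per-request bookkeeping, locking and the ring kick between these steps are omitted):

/* Hedged sketch of the enqueue path. */
static int sketch_queue_req(struct crypto_async_request *base)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ring;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			/* Old record may still sit in the engine cache:
			 * this request will carry an invalidation first. */
			ctx->base.needs_inv = false;
		}
	} else {
		/* First request on this tfm: pick a ring and allocate the
		 * context record lazily. */
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;
	return crypto_enqueue_request(&priv->ring[ring].queue, base);
}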
1196 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1202 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1211 alg.skcipher.base);
1216 ctx->base.priv = tmpl->priv;
1218 ctx->base.send = safexcel_skcipher_send;
1219 ctx->base.handle_result = safexcel_skcipher_handle_result;
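Lines 1211-1219 (and their AEAD counterparts at 1723-1737) are where base gets wired up: cra_init recovers the driver-wide private data from the registered algorithm template and installs the type-specific send/handle_result callbacks. A hedged sketch of that init pattern, following what the match at line 1211 implies:

/* Hedged sketch of the skcipher cra_init wiring. */
static int sketch_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	ctx->base.priv = tmpl->priv;

	/* The generic ring code only sees struct safexcel_context, so the
	 * type-specific entry points are stored in base. */
	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;

	return 0;
}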
1232 if (!ctx->base.ctxr)
1235 memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1242 struct safexcel_crypto_priv *priv = ctx->base.priv;
1254 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1255 ctx->base.ctxr_dma);
1262 struct safexcel_crypto_priv *priv = ctx->base.priv;
1274 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1275 ctx->base.ctxr_dma);
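The two near-identical blocks at 1242-1255 and 1262-1275 are the skcipher and AEAD teardown paths: after the shared helper has scrubbed the context record (line 1235), each either frees the record straight back to the DMA pool or, on TRC-equipped engines, first pushes the exit invalidation seen earlier. A hedged sketch of the direct-free side:

/* Hedged sketch of teardown; the TRC-cache branch that issues an exit
 * invalidation instead (see exit_inv above) is omitted. */
static void sketch_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;

	if (!ctx->base.ctxr)
		return;

	/* Key material lives in the record: wipe it before returning the
	 * memory to the pool. */
	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	dma_pool_free(priv->context_pool, ctx->base.ctxr, ctx->base.ctxr_dma);
}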
1300 .base = {
1338 .base = {
1376 .base = {
1414 .base = {
1436 struct safexcel_crypto_priv *priv = ctx->base.priv;
1449 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1452 ctx->base.needs_inv = true;
1489 .base = {
1510 struct safexcel_crypto_priv *priv = ctx->base.priv;
1518 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1520 ctx->base.needs_inv = true;
1550 .base = {
1588 .base = {
1609 struct safexcel_crypto_priv *priv = ctx->base.priv;
1617 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1619 ctx->base.needs_inv = true;
1649 .base = {
1687 .base = {
1708 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1715 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1723 alg.aead.base);
1728 ctx->base.priv = tmpl->priv;
1736 ctx->base.send = safexcel_aead_send;
1737 ctx->base.handle_result = safexcel_aead_handle_result;
1760 .base = {
1796 .base = {
1832 .base = {
1868 .base = {
1904 .base = {
1941 .base = {
1978 .base = {
2015 .base = {
2052 .base = {
2089 .base = {
2126 .base = {
2163 .base = {
2200 .base = {
2237 .base = {
2274 .base = {
2309 .base = {
2344 .base = {
2379 .base = {
2414 .base = {
2449 .base = {
2471 struct safexcel_crypto_priv *priv = ctx->base.priv;
2487 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2490 ctx->base.needs_inv = true;
2504 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2508 ctx->base.needs_inv = true;
2540 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2548 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2563 .base = {
2585 struct safexcel_crypto_priv *priv = ctx->base.priv;
2596 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2599 ctx->base.needs_inv = true;
2614 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2616 if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2617 ctx->base.needs_inv = true;
2624 ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
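Lines 2585-2624 show the setkey convention that repeats throughout the file: the new key material is compared against what is already cached in the context (here the GHASH hash key stored big-endian in base.ipad), and needs_inv is set only if it actually changed while the engine's record cache is active. A hedged sketch of that comparison loop, assuming a 16-byte (AES block sized) hash key held as host-order u32 words:

/* Hedged sketch of the "only invalidate when the key really changed"
 * pattern around line 2616. hashkey[] is the freshly derived GHASH key. */
static void sketch_update_hashkey(struct safexcel_cipher_ctx *ctx,
				  struct safexcel_crypto_priv *priv,
				  const u32 *hashkey)
{
	int i;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
				/* Stale record may be cached in the TRC. */
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Store the new value so the next setkey can repeat the check. */
	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
}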
2665 .base = {
2687 struct safexcel_crypto_priv *priv = ctx->base.priv;
2697 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2700 ctx->base.needs_inv = true;
2708 ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2766 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2776 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2789 .base = {
2809 struct safexcel_crypto_priv *priv = ctx->base.priv;
2811 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2813 ctx->base.needs_inv = true;
2853 .base = {
2916 return safexcel_queue_req(&req->base, creq, dir);
2939 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2940 req->base.data);
2969 ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
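Lines 2939-2940 (and again at 3408-3409) show the software-fallback hand-off: the sub-request borrows the original request's base flags, completion callback and callback data so the caller cannot tell which implementation ran. A hedged sketch of that forwarding; the field name fback follows the match at line 2969, everything else (request layout, how the sub-request storage is obtained) is assumed for illustration:

/* Hedged sketch of handing a request off to the software fallback AEAD. */
static int sketch_fallback_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(aead);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	/* Reuse the caller's flags and completion so the fallback stays
	 * transparent to the crypto API user. */
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return enc ? crypto_aead_encrypt(subreq) : crypto_aead_decrypt(subreq);
}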
3014 .base = {
3054 .base = {
3078 struct safexcel_crypto_priv *priv = ctx->base.priv;
3083 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3085 ctx->base.needs_inv = true;
3099 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3109 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3134 .base = {
3172 .base = {
3210 .base = {
3248 .base = {
3301 .base = {
3324 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3336 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3361 .base = {
3408 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3409 req->base.data);
3427 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3443 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3471 .base = {
3507 .base = {
3542 .base = {
3611 .base = {
3656 .base = {
3713 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3724 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3748 .base = {