Lines matching refs:cryp in the STM32 CRYP driver (stm32-cryp.c); the leading number on each line is its line number in the source file.
24 #define DRIVER_NAME "stm32-cryp"
116 struct stm32_cryp *cryp;
167 static inline bool is_aes(struct stm32_cryp *cryp)
169 return cryp->flags & FLG_AES;
172 static inline bool is_des(struct stm32_cryp *cryp)
174 return cryp->flags & FLG_DES;
177 static inline bool is_tdes(struct stm32_cryp *cryp)
179 return cryp->flags & FLG_TDES;
182 static inline bool is_ecb(struct stm32_cryp *cryp)
184 return cryp->flags & FLG_ECB;
187 static inline bool is_cbc(struct stm32_cryp *cryp)
189 return cryp->flags & FLG_CBC;
192 static inline bool is_ctr(struct stm32_cryp *cryp)
194 return cryp->flags & FLG_CTR;
197 static inline bool is_gcm(struct stm32_cryp *cryp)
199 return cryp->flags & FLG_GCM;
202 static inline bool is_ccm(struct stm32_cryp *cryp)
204 return cryp->flags & FLG_CCM;
207 static inline bool is_encrypt(struct stm32_cryp *cryp)
209 return cryp->flags & FLG_ENCRYPT;
212 static inline bool is_decrypt(struct stm32_cryp *cryp)
214 return !is_encrypt(cryp);
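The predicates above each test one bit packed into cryp->flags. The FLG_* masks themselves fall outside this match listing; a minimal sketch of the assumed layout (bit positions are not verified against the source):

    /* One bit per algorithm/chaining mode in the low half, the
     * direction bit above them; FLG_MODE_MASK (used at source line
     * 1008 below) would then cover only the mode bits. */
    #define FLG_AES         BIT(0)
    #define FLG_DES         BIT(1)
    #define FLG_TDES        BIT(2)
    #define FLG_ECB         BIT(3)
    #define FLG_CBC         BIT(4)
    #define FLG_CTR         BIT(5)
    #define FLG_GCM         BIT(6)
    #define FLG_CCM         BIT(7)
    #define FLG_MODE_MASK   GENMASK(15, 0)
    #define FLG_ENCRYPT     BIT(16)

    /* An AES-GCM encrypt request would then carry:
     * cryp->flags = FLG_AES | FLG_GCM | FLG_ENCRYPT; */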
217 static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
219 return readl_relaxed(cryp->regs + ofst);
222 static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
224 writel_relaxed(val, cryp->regs + ofst);
227 static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
231 return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
235 static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
239 return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
243 static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
247 return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
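The three wait helpers are cut short by the matcher; each polls a register with readl_relaxed_poll_timeout() (linux/iopoll.h) until a condition holds or a timeout expires. A hedged reconstruction of the busy-wait variant: the SR_BUSY condition and the 10/100000 microsecond values are assumptions, only the visible fragment is from the source.

    static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
    {
            u32 status;

            /* Poll CRYP_SR every 10 us until BUSY clears; returns
             * -ETIMEDOUT if it is still set when the timeout expires. */
            return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
                                              !(status & SR_BUSY), 10,
                                              100000);
    }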
251 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
252 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);
256 struct stm32_cryp *tmp, *cryp = NULL;
259 if (!ctx->cryp) {
261 cryp = tmp;
264 ctx->cryp = cryp;
266 cryp = ctx->cryp;
271 return cryp;
274 static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
279 stm32_cryp_write(cryp, CRYP_IV0LR, be32_to_cpu(*iv++));
280 stm32_cryp_write(cryp, CRYP_IV0RR, be32_to_cpu(*iv++));
282 if (is_aes(cryp)) {
283 stm32_cryp_write(cryp, CRYP_IV1LR, be32_to_cpu(*iv++));
284 stm32_cryp_write(cryp, CRYP_IV1RR, be32_to_cpu(*iv++));
288 static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
290 struct skcipher_request *req = cryp->req;
296 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
297 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
299 if (is_aes(cryp)) {
300 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
301 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
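AES carries a 128-bit IV across four 32-bit registers (IV0L/IV0R/IV1L/IV1R) while DES/TDES needs only the first pair, hence the is_aes() branches in both helpers. A standalone user-space sketch of the byte-order handling, with ntohl() standing in for the kernel's be32_to_cpu():

    #include <stdint.h>
    #include <string.h>
    #include <arpa/inet.h>

    /* Split a 16-byte big-endian IV into the four CPU-order words
     * written to the IV registers above. */
    static void iv_to_words(const uint8_t iv[16], uint32_t w[4])
    {
            for (int i = 0; i < 4; i++) {
                    uint32_t be;

                    memcpy(&be, iv + 4 * i, sizeof(be));
                    w[i] = ntohl(be); /* be32_to_cpu() equivalent */
            }
    }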
321 static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
323 if (is_aes(cryp) && is_ecb(cryp))
326 if (is_aes(cryp) && is_cbc(cryp))
329 if (is_aes(cryp) && is_ctr(cryp))
332 if (is_aes(cryp) && is_gcm(cryp))
335 if (is_aes(cryp) && is_ccm(cryp))
338 if (is_des(cryp) && is_ecb(cryp))
341 if (is_des(cryp) && is_cbc(cryp))
344 if (is_tdes(cryp) && is_ecb(cryp))
347 if (is_tdes(cryp) && is_cbc(cryp))
350 dev_err(cryp->dev, "Unknown mode\n");
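Mode selection is a flat if-chain over (algorithm, chaining) predicate pairs. An equivalent table-driven sketch; the CR_* names follow the pattern visible at source lines 1444/1458, but the full constant set and the table itself are illustrative, not from the source:

    static const struct {
            u32 flags;      /* algorithm | chaining-mode bits */
            u32 mode;       /* value for the CR mode field */
    } hw_modes[] = {
            { FLG_AES  | FLG_ECB, CR_AES_ECB },
            { FLG_AES  | FLG_CBC, CR_AES_CBC },
            { FLG_AES  | FLG_CTR, CR_AES_CTR },
            { FLG_AES  | FLG_GCM, CR_AES_GCM },
            { FLG_AES  | FLG_CCM, CR_AES_CCM },
            { FLG_DES  | FLG_ECB, CR_DES_ECB },
            { FLG_DES  | FLG_CBC, CR_DES_CBC },
            { FLG_TDES | FLG_ECB, CR_TDES_ECB },
            { FLG_TDES | FLG_CBC, CR_TDES_CBC },
    };

    unsigned int i;

    for (i = 0; i < ARRAY_SIZE(hw_modes); i++)
            if ((cryp->flags & hw_modes[i].flags) == hw_modes[i].flags)
                    return hw_modes[i].mode;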
354 static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
356 return is_encrypt(cryp) ? cryp->areq->cryptlen :
357 cryp->areq->cryptlen - cryp->authsize;
360 static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
366 memcpy(iv, cryp->areq->iv, 12);
368 cryp->gcm_ctr = GCM_CTR_INIT;
369 stm32_cryp_hw_write_iv(cryp, iv);
371 stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);
374 ret = stm32_cryp_wait_enable(cryp);
376 dev_err(cryp->dev, "Timeout (gcm init)\n");
381 if (cryp->areq->assoclen) {
383 stm32_cryp_write(cryp, CRYP_CR, cfg);
384 } else if (stm32_cryp_get_input_text_len(cryp)) {
386 stm32_cryp_write(cryp, CRYP_CR, cfg);
392 static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
398 if (!cryp->header_in) {
400 err = stm32_cryp_wait_busy(cryp);
402 dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
403 stm32_cryp_write(cryp, CRYP_IMSCR, 0);
404 stm32_cryp_finish_req(cryp, err);
408 if (stm32_cryp_get_input_text_len(cryp)) {
410 cfg = stm32_cryp_read(cryp, CRYP_CR);
412 stm32_cryp_write(cryp, CRYP_CR, cfg);
416 stm32_cryp_write(cryp, CRYP_CR, cfg);
427 static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
432 u32 alen = cryp->areq->assoclen;
454 scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);
456 stm32_cryp_write(cryp, CRYP_DIN, block[i]);
458 cryp->header_in -= written;
460 stm32_crypt_gcmccm_end_header(cryp);
463 static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
473 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
476 stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);
481 b0[0] |= (8 * ((cryp->authsize - 2) / 2));
483 if (cryp->areq->assoclen)
486 textlen = stm32_cryp_get_input_text_len(cryp);
492 stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);
501 if (!cryp->caps->padding_wa)
503 stm32_cryp_write(cryp, CRYP_DIN, xd);
507 ret = stm32_cryp_wait_enable(cryp);
509 dev_err(cryp->dev, "Timeout (ccm init)\n");
514 if (cryp->areq->assoclen) {
516 stm32_cryp_write(cryp, CRYP_CR, cfg);
519 stm32_cryp_write_ccm_first_header(cryp);
520 } else if (stm32_cryp_get_input_text_len(cryp)) {
522 stm32_cryp_write(cryp, CRYP_CR, cfg);
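CCM builds its first block B0 from the nonce, the tag length, and an "associated data present" flag (RFC 3610). Source line 481 encodes the tag length as M' = (authsize - 2) / 2 in bits 5..3 of the flags byte, and line 483 gates the Adata bit on assoclen. A standalone sketch of just that flags byte (length encoding omitted; lprime comes from iv[0] in the real driver):

    #include <stdint.h>

    /* RFC 3610 B0 flags byte:
     *   bit 6     Adata (header present)
     *   bits 5..3 M' = (tag_len - 2) / 2
     *   bits 2..0 L' = length-field size minus 1
     */
    static uint8_t ccm_b0_flags(uint8_t lprime, unsigned int tag_len,
                                int has_header)
    {
            uint8_t flags = lprime & 0x07;

            flags |= 8 * ((tag_len - 2) / 2);       /* cf. line 481 */
            if (has_header)
                    flags |= 0x40;                  /* cf. line 483 */
            return flags;
    }

    /* e.g. 16-byte tag, header present, 4-byte length field (L' = 3):
     * 0x40 | (8 * 7) | 3 = 0x7b */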
528 static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
533 pm_runtime_get_sync(cryp->dev);
536 stm32_cryp_write(cryp, CRYP_IMSCR, 0);
539 stm32_cryp_hw_write_key(cryp);
544 switch (cryp->ctx->keylen) {
559 hw_mode = stm32_cryp_get_hw_mode(cryp);
564 if (is_decrypt(cryp) &&
566 stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);
569 ret = stm32_cryp_wait_busy(cryp);
571 dev_err(cryp->dev, "Timeout (key preparation)\n");
578 if (is_decrypt(cryp))
582 stm32_cryp_write(cryp, CRYP_CR, cfg);
589 ret = stm32_cryp_ccm_init(cryp, cfg);
591 ret = stm32_cryp_gcm_init(cryp, cfg);
602 stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
612 stm32_cryp_write(cryp, CRYP_CR, cfg);
617 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
619 if (!err && (is_gcm(cryp) || is_ccm(cryp)))
621 err = stm32_cryp_read_auth_tag(cryp);
623 if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
624 stm32_cryp_get_iv(cryp);
626 pm_runtime_mark_last_busy(cryp->dev);
627 pm_runtime_put_autosuspend(cryp->dev);
629 if (is_gcm(cryp) || is_ccm(cryp))
630 crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
632 crypto_finalize_skcipher_request(cryp->engine, cryp->req,
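Note the runtime-PM bracketing across these two functions: stm32_cryp_hw_init() takes a reference (source line 533) that stm32_cryp_finish_req() drops again (lines 626-627), letting the IP autosuspend between requests. The pattern, reduced to its skeleton:

    pm_runtime_get_sync(cryp->dev);         /* stm32_cryp_hw_init() */
    /* ... program the hardware and run the request ... */
    pm_runtime_mark_last_busy(cryp->dev);   /* stm32_cryp_finish_req() */
    pm_runtime_put_autosuspend(cryp->dev);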
636 static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
639 stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);
682 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
684 if (!cryp)
689 return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
696 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
698 if (!cryp)
703 return crypto_transfer_aead_request_to_engine(cryp->engine, req);
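Both entry points queue the request on the crypto engine instead of running it inline; the engine later calls back into the driver to start the hardware, and completion is reported through the crypto_finalize_*_request() calls at source lines 630/632. The round trip for the skcipher case, as a skeleton (the callback in the middle is paraphrased; its name is not visible in this listing):

    /* 1. From the algorithm's .encrypt/.decrypt hook: */
    return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);

    /* 2. The engine dequeues and invokes the driver's registered
     *    one-request handler, which programs the CRYP block. */

    /* 3. When the last block is out (or on error): */
    crypto_finalize_skcipher_request(cryp->engine, cryp->req, err);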
987 struct stm32_cryp *cryp;
998 cryp = ctx->cryp;
1000 if (!cryp)
1006 ctx->cryp = cryp;
1008 cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
1009 cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
1010 cryp->ctx = ctx;
1013 cryp->req = req;
1014 cryp->areq = NULL;
1015 cryp->header_in = 0;
1016 cryp->payload_in = req->cryptlen;
1017 cryp->payload_out = req->cryptlen;
1018 cryp->authsize = 0;
1036 cryp->areq = areq;
1037 cryp->req = NULL;
1038 cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
1039 if (is_encrypt(cryp)) {
1040 cryp->payload_in = areq->cryptlen;
1041 cryp->header_in = areq->assoclen;
1042 cryp->payload_out = areq->cryptlen;
1044 cryp->payload_in = areq->cryptlen - cryp->authsize;
1045 cryp->header_in = areq->assoclen;
1046 cryp->payload_out = cryp->payload_in;
1051 scatterwalk_start(&cryp->in_walk, in_sg);
1053 cryp->out_sg = req ? req->dst : areq->dst;
1054 scatterwalk_start(&cryp->out_walk, cryp->out_sg);
1056 if (is_gcm(cryp) || is_ccm(cryp)) {
1058 scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
1061 if (is_ctr(cryp))
1062 memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));
1064 ret = stm32_cryp_hw_init(cryp);
1085 struct stm32_cryp *cryp = ctx->cryp;
1087 if (!cryp)
1090 return stm32_cryp_cpu_start(cryp);
1106 struct stm32_cryp *cryp = ctx->cryp;
1108 if (!cryp)
1111 if (unlikely(!cryp->payload_in && !cryp->header_in)) {
1113 stm32_cryp_finish_req(cryp, 0);
1117 return stm32_cryp_cpu_start(cryp);
1120 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
1127 cfg = stm32_cryp_read(cryp, CRYP_CR);
1134 stm32_cryp_write(cryp, CRYP_CR, cfg);
1136 if (is_gcm(cryp)) {
1138 size_bit = cryp->areq->assoclen * 8;
1139 if (cryp->caps->swap_final)
1142 stm32_cryp_write(cryp, CRYP_DIN, 0);
1143 stm32_cryp_write(cryp, CRYP_DIN, size_bit);
1145 size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
1146 cryp->areq->cryptlen - cryp->authsize;
1148 if (cryp->caps->swap_final)
1151 stm32_cryp_write(cryp, CRYP_DIN, 0);
1152 stm32_cryp_write(cryp, CRYP_DIN, size_bit);
1159 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
1165 if (!cryp->caps->padding_wa)
1167 stm32_cryp_write(cryp, CRYP_DIN, xiv);
1172 ret = stm32_cryp_wait_output(cryp);
1174 dev_err(cryp->dev, "Timeout (read tag)\n");
1178 if (is_encrypt(cryp)) {
1183 out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);
1185 scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
1190 scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);
1193 out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);
1195 if (crypto_memneq(in_tag, out_tag, cryp->authsize))
1199 /* Disable cryp */
1201 stm32_cryp_write(cryp, CRYP_CR, cfg);
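On decryption the computed tag is checked with crypto_memneq() (source line 1195) rather than memcmp(), so the comparison time does not reveal how many leading tag bytes matched. A standalone constant-time comparison equivalent in spirit:

    #include <stddef.h>
    #include <stdint.h>

    /* Returns nonzero iff a and b differ; runtime is independent of
     * where the first mismatch occurs. */
    static int ct_memneq(const void *a, const void *b, size_t n)
    {
            const uint8_t *pa = a, *pb = b;
            uint8_t diff = 0;

            for (size_t i = 0; i < n; i++)
                    diff |= pa[i] ^ pb[i];
            return diff != 0;
    }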
1206 static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
1210 if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
1215 crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));
1217 cr = stm32_cryp_read(cryp, CRYP_CR);
1218 stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);
1220 stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);
1222 stm32_cryp_write(cryp, CRYP_CR, cr);
1226 cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
1227 cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
1228 cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
1229 cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
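The CRYP block increments only the low 32 bits of the CTR counter, so the driver shadows the last counter value and, just before the low word wraps (source line 1210), performs the full 128-bit big-endian increment in software via crypto_inc() and rewrites the IV registers. What crypto_inc() does, as a standalone sketch:

    #include <stddef.h>
    #include <stdint.h>

    /* Increment a big-endian counter of len bytes, propagating the
     * carry from the last byte upward. */
    static void be_ctr_inc(uint8_t *ctr, size_t len)
    {
            for (size_t i = len; i-- > 0; )
                    if (++ctr[i] != 0)
                            break;  /* no carry left to propagate */
    }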
1232 static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
1237 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
1238 block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
1240 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1241 cryp->payload_out), 1);
1242 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
1243 cryp->payload_out);
1246 static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
1251 scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
1252 cryp->payload_in), 0);
1253 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
1254 stm32_cryp_write(cryp, CRYP_DIN, block[i]);
1256 cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
1259 static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
1268 stm32_cryp_write(cryp, CRYP_IMSCR, 0);
1269 cfg = stm32_cryp_read(cryp, CRYP_CR);
1271 stm32_cryp_write(cryp, CRYP_CR, cfg);
1274 stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);
1279 stm32_cryp_write(cryp, CRYP_CR, cfg);
1283 stm32_cryp_write(cryp, CRYP_CR, cfg);
1286 stm32_cryp_irq_write_block(cryp);
1288 err = stm32_cryp_wait_output(cryp);
1290 dev_err(cryp->dev, "Timeout (write gcm last data)\n");
1291 return stm32_cryp_finish_req(cryp, err);
1299 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
1300 block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
1302 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1303 cryp->payload_out), 1);
1304 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
1305 cryp->payload_out);
1310 stm32_cryp_write(cryp, CRYP_CR, cfg);
1315 stm32_cryp_write(cryp, CRYP_CR, cfg);
1319 stm32_cryp_write(cryp, CRYP_DIN, block[i]);
1322 err = stm32_cryp_wait_output(cryp);
1324 dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
1325 return stm32_cryp_finish_req(cryp, err);
1329 stm32_cryp_read(cryp, CRYP_DOUT);
1332 stm32_cryp_finish_req(cryp, 0);
1335 static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
1340 cfg = stm32_cryp_read(cryp, CRYP_CR);
1342 stm32_cryp_write(cryp, CRYP_CR, cfg);
1344 cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
1346 stm32_cryp_write(cryp, CRYP_CR, cfg);
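On IP revisions without caps->padding_wa, a short final block is handled by telling the hardware how many bytes of the last block are padding (NBPBL), so it can exclude them from the result. Worked numbers, illustrative only:

    /* AES block = 16 bytes, 11 payload bytes remaining:
     *   NBPBL = 16 - 11 = 5
     *   cfg |= 5 << CR_NBPBL_SHIFT;
     * i.e. line 1344 with hw_blocksize = 16 and payload_in = 11. */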
1349 static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
1360 stm32_cryp_write(cryp, CRYP_IMSCR, 0);
1362 cfg = stm32_cryp_read(cryp, CRYP_CR);
1364 stm32_cryp_write(cryp, CRYP_CR, cfg);
1367 iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);
1371 cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
1374 stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);
1379 stm32_cryp_write(cryp, CRYP_CR, cfg);
1383 stm32_cryp_write(cryp, CRYP_CR, cfg);
1386 stm32_cryp_irq_write_block(cryp);
1388 err = stm32_cryp_wait_output(cryp);
1390 dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
1391 return stm32_cryp_finish_req(cryp, err);
1399 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
1400 block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
1402 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1403 cryp->payload_out), 1);
1404 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);
1408 cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
1413 stm32_cryp_write(cryp, CRYP_CR, cfg);
1418 stm32_cryp_write(cryp, CRYP_CR, cfg);
1424 stm32_cryp_write(cryp, CRYP_DIN, block[i]);
1428 err = stm32_cryp_wait_busy(cryp);
1430 dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
1433 stm32_cryp_finish_req(cryp, err);
1436 static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
1438 if (unlikely(!cryp->payload_in)) {
1439 dev_warn(cryp->dev, "No more data to process\n");
1443 if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
1444 (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
1445 is_encrypt(cryp))) {
1447 if (cryp->caps->padding_wa) {
1449 stm32_cryp_irq_write_gcm_padded_data(cryp);
1454 stm32_cryp_irq_set_npblb(cryp);
1457 if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
1458 (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
1459 is_decrypt(cryp))) {
1461 if (cryp->caps->padding_wa) {
1463 stm32_cryp_irq_write_ccm_padded_data(cryp);
1468 stm32_cryp_irq_set_npblb(cryp);
1471 if (is_aes(cryp) && is_ctr(cryp))
1472 stm32_cryp_check_ctr_counter(cryp);
1474 stm32_cryp_irq_write_block(cryp);
1477 static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
1483 written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);
1485 scatterwalk_copychunks(block, &cryp->in_walk, written, 0);
1487 stm32_cryp_write(cryp, CRYP_DIN, block[i]);
1489 cryp->header_in -= written;
1491 stm32_crypt_gcmccm_end_header(cryp);
1496 struct stm32_cryp *cryp = arg;
1498 u32 it_mask = stm32_cryp_read(cryp, CRYP_IMSCR);
1500 if (cryp->irq_status & MISR_OUT)
1502 stm32_cryp_irq_read_data(cryp);
1504 if (cryp->irq_status & MISR_IN) {
1505 if (is_gcm(cryp) || is_ccm(cryp)) {
1506 ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
1509 stm32_cryp_irq_write_gcmccm_header(cryp);
1512 stm32_cryp_irq_write_data(cryp);
1513 if (is_gcm(cryp))
1514 cryp->gcm_ctr++;
1517 stm32_cryp_irq_write_data(cryp);
1522 if (!cryp->payload_in && !cryp->header_in)
1524 if (!cryp->payload_out)
1526 stm32_cryp_write(cryp, CRYP_IMSCR, it_mask);
1528 if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out)
1529 stm32_cryp_finish_req(cryp, 0);
1536 struct stm32_cryp *cryp = arg;
1538 cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);
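The interrupt path is split in two halves: the hard handler (source lines 1536-1538) only latches CRYP_MISR into cryp->irq_status and wakes the thread, while the threaded handler (lines 1496-1529) moves data through the FIFOs and finishes the request. A hedged reconstruction of the registration whose tail is visible at source line 1756; the handler names and the IRQF_ONESHOT flag are assumptions:

    ret = devm_request_threaded_irq(dev, irq,
                                    stm32_cryp_irq,        /* hard: latch MISR */
                                    stm32_cryp_irq_thread, /* thread: move data */
                                    IRQF_ONESHOT,
                                    dev_name(dev), cryp);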
1723 { .compatible = "st,stm32f756-cryp", .data = &f7_data},
1724 { .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
1732 struct stm32_cryp *cryp;
1736 cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
1737 if (!cryp)
1740 cryp->caps = of_device_get_match_data(dev);
1741 if (!cryp->caps)
1744 cryp->dev = dev;
1746 cryp->regs = devm_platform_ioremap_resource(pdev, 0);
1747 if (IS_ERR(cryp->regs))
1748 return PTR_ERR(cryp->regs);
1756 dev_name(dev), cryp);
1762 cryp->clk = devm_clk_get(dev, NULL);
1763 if (IS_ERR(cryp->clk)) {
1765 return PTR_ERR(cryp->clk);
1768 ret = clk_prepare_enable(cryp->clk);
1770 dev_err(cryp->dev, "Failed to enable clock\n");
1788 platform_set_drvdata(pdev, cryp);
1791 list_add(&cryp->list, &cryp_list.dev_list);
1795 cryp->engine = crypto_engine_alloc_init(dev, 1);
1796 if (!cryp->engine) {
1802 ret = crypto_engine_start(cryp->engine);
1828 crypto_engine_exit(cryp->engine);
1831 list_del(&cryp->list);
1837 clk_disable_unprepare(cryp->clk);
1844 struct stm32_cryp *cryp = platform_get_drvdata(pdev);
1847 if (!cryp)
1850 ret = pm_runtime_resume_and_get(cryp->dev);
1857 crypto_engine_exit(cryp->engine);
1860 list_del(&cryp->list);
1863 pm_runtime_disable(cryp->dev);
1864 pm_runtime_put_noidle(cryp->dev);
1866 clk_disable_unprepare(cryp->clk);
1874 struct stm32_cryp *cryp = dev_get_drvdata(dev);
1876 clk_disable_unprepare(cryp->clk);
1883 struct stm32_cryp *cryp = dev_get_drvdata(dev);
1886 ret = clk_prepare_enable(cryp->clk);
1888 dev_err(cryp->dev, "Failed to prepare_enable clock\n");