Lines Matching refs:aes_dev (uses of the struct ocs_aes_dev device context in ocs-aes.c, the Intel Keem Bay OCS AES/SM4 driver; every hit below references the aes_dev parameter, so this is a reference search rather than a definition search)

204 static inline void aes_a_set_endianness(const struct ocs_aes_dev *aes_dev)
206 iowrite32(0x7FF, aes_dev->base_reg + AES_BYTE_ORDER_CFG_OFFSET);
210 static inline void aes_a_op_trigger(const struct ocs_aes_dev *aes_dev)
212 iowrite32(AES_ACTIVE_TRIGGER, aes_dev->base_reg + AES_ACTIVE_OFFSET);
216 static inline void aes_a_op_termination(const struct ocs_aes_dev *aes_dev)
219 aes_dev->base_reg + AES_ACTIVE_OFFSET);
231 static inline void aes_a_set_last_gcx(const struct ocs_aes_dev *aes_dev)
234 aes_dev->base_reg + AES_ACTIVE_OFFSET);
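The hits at lines 210-234 are all single writes to the same AES_ACTIVE register, which drives the engine's command handshake: a trigger to start processing, a termination marker for the last bulk of data, and a "last GCX" flag for the final GCM/CCM block. A minimal sketch of how these helpers likely look; AES_ACTIVE_TERMINATION and AES_ACTIVE_LAST_CCM_GCM are assumed bit names (only AES_ACTIVE_TRIGGER appears in the hits), and all sketches in this listing assume the driver's own register offsets, struct ocs_aes_dev, and <linux/io.h>.

/* Start processing of the data currently queued to the engine. */
static inline void aes_a_op_trigger(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_ACTIVE_TRIGGER, aes_dev->base_reg + AES_ACTIVE_OFFSET);
}

/* Mark the end of the bulk of data. */
static inline void aes_a_op_termination(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_ACTIVE_TERMINATION,	/* assumed bit name */
		  aes_dev->base_reg + AES_ACTIVE_OFFSET);
}

/* Flag the last block of a GCM/CCM operation. */
static inline void aes_a_set_last_gcx(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_ACTIVE_LAST_CCM_GCM,	/* assumed bit name */
		  aes_dev->base_reg + AES_ACTIVE_OFFSET);
}
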
238 static inline void aes_a_wait_last_gcx(const struct ocs_aes_dev *aes_dev)
243 aes_active_reg = ioread32(aes_dev->base_reg +
249 static void aes_a_dma_wait_input_buffer_occupancy(const struct ocs_aes_dev *aes_dev)
254 reg = ioread32(aes_dev->base_reg + AES_A_DMA_STATUS_OFFSET);
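Both wait helpers (lines 238 and 249) are busy-wait loops rather than interrupt waits: they poll a status register until the relevant bits clear. A sketch under the assumption that AES_ACTIVE_LAST_CCM_GCM and AES_DMA_STATUS_INPUT_BUFFER_OCCUPANCY_MASK are the polled bits:

/* Spin until the engine clears the LAST_CCM_GCM flag. */
static inline void aes_a_wait_last_gcx(const struct ocs_aes_dev *aes_dev)
{
	u32 aes_active_reg;

	do {
		aes_active_reg = ioread32(aes_dev->base_reg +
					  AES_ACTIVE_OFFSET);
	} while (aes_active_reg & AES_ACTIVE_LAST_CCM_GCM); /* assumed bit */
}

/* Spin until the DMA input buffer has drained into the engine. */
static void aes_a_dma_wait_input_buffer_occupancy(const struct ocs_aes_dev *aes_dev)
{
	u32 reg;

	do {
		reg = ioread32(aes_dev->base_reg + AES_A_DMA_STATUS_OFFSET);
	} while (reg & AES_DMA_STATUS_INPUT_BUFFER_OCCUPANCY_MASK); /* assumed mask */
}
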
265 static inline void aes_a_set_last_gcx_and_adata(const struct ocs_aes_dev *aes_dev)
268 aes_dev->base_reg + AES_ACTIVE_OFFSET);
272 static inline void aes_a_dma_set_xfer_size_zero(const struct ocs_aes_dev *aes_dev)
274 iowrite32(0, aes_dev->base_reg + AES_A_DMA_SRC_SIZE_OFFSET);
275 iowrite32(0, aes_dev->base_reg + AES_A_DMA_DST_SIZE_OFFSET);
279 static inline void aes_a_dma_active(const struct ocs_aes_dev *aes_dev)
282 aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
286 static inline void aes_a_dma_active_src_ll_en(const struct ocs_aes_dev *aes_dev)
290 aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
294 static inline void aes_a_dma_active_dst_ll_en(const struct ocs_aes_dev *aes_dev)
298 aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
302 static inline void aes_a_dma_active_src_dst_ll_en(const struct ocs_aes_dev *aes_dev)
307 aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
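The aes_a_dma_active* family (lines 279-307) differs only in which linked-list enable bits get OR-ed into the DMA mode register: none for a plain zero-length transfer, source only (e.g. when feeding AAD), destination only, or both for a full in/out stream. A sketch of the two extremes; the AES_A_DMA_DMA_MODE_ACTIVE and *_LINK_LIST_EN bit names are assumptions:

/* Plain activation, used for zero-length transfers. */
static inline void aes_a_dma_active(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_A_DMA_DMA_MODE_ACTIVE,	/* assumed bit name */
		  aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
}

/* Activation with both source and destination linked lists enabled. */
static inline void aes_a_dma_active_src_dst_ll_en(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(AES_A_DMA_DMA_MODE_ACTIVE |		/* assumed bit names */
		  AES_A_DMA_DMA_MODE_SRC_LINK_LIST_EN |
		  AES_A_DMA_DMA_MODE_DST_LINK_LIST_EN,
		  aes_dev->base_reg + AES_A_DMA_DMA_MODE_OFFSET);
}
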
311 static inline void aes_a_dma_reset_and_activate_perf_cntr(const struct ocs_aes_dev *aes_dev)
313 iowrite32(0x00000000, aes_dev->base_reg + AES_A_DMA_PERF_CNTR_OFFSET);
315 aes_dev->base_reg + AES_A_DMA_WHILE_ACTIVE_MODE_OFFSET);
319 static inline void aes_a_dma_wait_and_deactivate_perf_cntr(const struct ocs_aes_dev *aes_dev,
322 while (ioread32(aes_dev->base_reg + AES_A_DMA_PERF_CNTR_OFFSET) < delay)
325 aes_dev->base_reg + AES_A_DMA_WHILE_ACTIVE_MODE_OFFSET);
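Lines 311-325 show the DMA performance counter being repurposed as a delay source: zero it, start it counting, spin until it reaches the requested count, then stop it. A sketch; AES_ACTIVATE_PERF_CNTR and AES_DEACTIVATE_PERF_CNTR are assumed value names:

/* Reset the DMA performance counter and start it counting. */
static inline void aes_a_dma_reset_and_activate_perf_cntr(const struct ocs_aes_dev *aes_dev)
{
	iowrite32(0x00000000, aes_dev->base_reg + AES_A_DMA_PERF_CNTR_OFFSET);
	iowrite32(AES_ACTIVATE_PERF_CNTR,	/* assumed value name */
		  aes_dev->base_reg + AES_A_DMA_WHILE_ACTIVE_MODE_OFFSET);
}

/* Busy-wait until the counter reaches 'delay', then stop it. */
static inline void aes_a_dma_wait_and_deactivate_perf_cntr(const struct ocs_aes_dev *aes_dev,
							   int delay)
{
	while (ioread32(aes_dev->base_reg + AES_A_DMA_PERF_CNTR_OFFSET) < delay)
		;
	iowrite32(AES_DEACTIVATE_PERF_CNTR,	/* assumed value name */
		  aes_dev->base_reg + AES_A_DMA_WHILE_ACTIVE_MODE_OFFSET);
}
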
329 static void aes_irq_disable(struct ocs_aes_dev *aes_dev)
335 aes_dev->base_reg + AES_A_DMA_MSI_IER_OFFSET);
336 iowrite32(AES_DISABLE_INT, aes_dev->base_reg + AES_IER_OFFSET);
339 isr_val = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);
342 aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);
344 isr_val = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_MASK_OFFSET);
347 aes_dev->base_reg + AES_A_DMA_MSI_MASK_OFFSET);
349 isr_val = ioread32(aes_dev->base_reg + AES_ISR_OFFSET);
351 iowrite32(isr_val, aes_dev->base_reg + AES_ISR_OFFSET);
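aes_irq_disable() (lines 329-351) follows a mask-then-clear pattern: disable both the DMA MSI and AES interrupt enables, then read each status register and write the value back (write-one-to-clear) so no stale interrupt survives. Reconstructed sketch from the hits above; the conditional clears are inferred from the elided lines between the reads and the write-backs:

static void aes_irq_disable(struct ocs_aes_dev *aes_dev)
{
	u32 isr_val;

	/* Mask DMA and AES interrupts. */
	iowrite32(AES_DISABLE_INT,
		  aes_dev->base_reg + AES_A_DMA_MSI_IER_OFFSET);
	iowrite32(AES_DISABLE_INT, aes_dev->base_reg + AES_IER_OFFSET);

	/* Clear any pending DMA MSI interrupt (write-one-to-clear). */
	isr_val = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);
	if (isr_val)
		iowrite32(isr_val,
			  aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);

	/* Clear the MSI mask register the same way. */
	isr_val = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_MASK_OFFSET);
	if (isr_val)
		iowrite32(isr_val,
			  aes_dev->base_reg + AES_A_DMA_MSI_MASK_OFFSET);

	/* Clear any pending AES interrupt. */
	isr_val = ioread32(aes_dev->base_reg + AES_ISR_OFFSET);
	if (isr_val)
		iowrite32(isr_val, aes_dev->base_reg + AES_ISR_OFFSET);
}
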
355 static void aes_irq_enable(struct ocs_aes_dev *aes_dev, u8 irq)
366 aes_dev->base_reg + AES_A_DMA_MSI_IER_OFFSET);
375 iowrite32(AES_COMPLETE_INT, aes_dev->base_reg + AES_IER_OFFSET);
380 iowrite32(AES_DISABLE_INT, aes_dev->base_reg + AES_IER_OFFSET);
403 aes_dev->base_reg + AES_A_DMA_MSI_IER_OFFSET);
408 static int ocs_aes_irq_enable_and_wait(struct ocs_aes_dev *aes_dev, u8 irq)
412 reinit_completion(&aes_dev->irq_completion);
413 aes_irq_enable(aes_dev, irq);
414 rc = wait_for_completion_interruptible(&aes_dev->irq_completion);
418 return aes_dev->dma_err_mask ? -EIO : 0;
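Lines 408-418 give nearly the whole of ocs_aes_irq_enable_and_wait(), the driver's synchronous wait primitive: re-arm the completion, unmask the requested interrupt, sleep interruptibly, and convert any DMA error latched by the ISR into -EIO. A sketch filling in the one elided early return:

/* Enable the given interrupt and sleep until the ISR signals it. */
static int ocs_aes_irq_enable_and_wait(struct ocs_aes_dev *aes_dev, u8 irq)
{
	int rc;

	reinit_completion(&aes_dev->irq_completion);
	aes_irq_enable(aes_dev, irq);
	rc = wait_for_completion_interruptible(&aes_dev->irq_completion);
	if (rc)
		return rc;

	/* The ISR latches DMA errors into dma_err_mask; report as -EIO. */
	return aes_dev->dma_err_mask ? -EIO : 0;
}
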
422 static inline void dma_to_ocs_aes_ll(struct ocs_aes_dev *aes_dev,
425 iowrite32(0, aes_dev->base_reg + AES_A_DMA_SRC_SIZE_OFFSET);
427 aes_dev->base_reg + AES_A_DMA_NEXT_SRC_DESCR_OFFSET);
431 static inline void dma_from_ocs_aes_ll(struct ocs_aes_dev *aes_dev,
434 iowrite32(0, aes_dev->base_reg + AES_A_DMA_DST_SIZE_OFFSET);
436 aes_dev->base_reg + AES_A_DMA_NEXT_DST_DESCR_OFFSET);
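Linked-list DMA mode (lines 422-436) is selected by zeroing the direct transfer size register and pointing the NEXT descriptor register at the DMA address of the first descriptor. Likely bodies, reconstructed from the hits (parameter naming assumed):

/* Point the DMA engine at a source descriptor chain (linked-list mode). */
static inline void dma_to_ocs_aes_ll(struct ocs_aes_dev *aes_dev,
				     dma_addr_t dma_list)
{
	iowrite32(0, aes_dev->base_reg + AES_A_DMA_SRC_SIZE_OFFSET);
	iowrite32(dma_list,
		  aes_dev->base_reg + AES_A_DMA_NEXT_SRC_DESCR_OFFSET);
}

/* Same for the destination side. */
static inline void dma_from_ocs_aes_ll(struct ocs_aes_dev *aes_dev,
				       dma_addr_t dma_list)
{
	iowrite32(0, aes_dev->base_reg + AES_A_DMA_DST_SIZE_OFFSET);
	iowrite32(dma_list,
		  aes_dev->base_reg + AES_A_DMA_NEXT_DST_DESCR_OFFSET);
}
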
441 struct ocs_aes_dev *aes_dev = dev_id;
445 aes_dma_isr = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);
448 aes_irq_disable(aes_dev);
451 aes_dev->dma_err_mask = aes_dma_isr &
461 complete(&aes_dev->irq_completion);
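The interrupt handler (lines 441-461) snapshots the DMA status before masking everything via aes_irq_disable(), latches any error bits where the sleeping waiter can see them, and signals the completion. The exact set of error bits OR-ed at line 451 is elided in the hits, so AES_DMA_ERR_INT_MASK below is a hypothetical stand-in for it:

static irqreturn_t ocs_aes_irq_handler(int irq, void *dev_id)
{
	struct ocs_aes_dev *aes_dev = dev_id;
	u32 aes_dma_isr;

	/* Snapshot DMA interrupt status before masking. */
	aes_dma_isr = ioread32(aes_dev->base_reg + AES_A_DMA_MSI_ISR_OFFSET);

	/* Mask and clear all interrupts. */
	aes_irq_disable(aes_dev);

	/* Latch any DMA error bits for ocs_aes_irq_enable_and_wait(). */
	aes_dev->dma_err_mask = aes_dma_isr & AES_DMA_ERR_INT_MASK; /* hypothetical mask */

	/* Wake the thread sleeping on irq_completion. */
	complete(&aes_dev->irq_completion);

	return IRQ_HANDLED;
}
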
468 * @aes_dev: The OCS AES device to write the key to.
477 int ocs_aes_set_key(struct ocs_aes_dev *aes_dev, u32 key_size, const u8 *key,
486 dev_err(aes_dev->dev,
493 dev_err(aes_dev->dev,
507 aes_dev->base_reg + AES_KEY_0_OFFSET +
518 iowrite32(val, aes_dev->base_reg + AES_KEY_SIZE_OFFSET);
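ocs_aes_set_key() (lines 477-518) rejects unsupported key sizes for the selected cipher (the dev_err hits at 486 and 493), then programs the key one 32-bit word at a time into consecutive AES_KEY registers and records the key width. A simplified sketch; the real validation distinguishes the AES and SM4 ciphers, and AES_128_BIT_KEY / AES_256_BIT_KEY are assumed value names:

static int ocs_aes_set_key_sketch(struct ocs_aes_dev *aes_dev, u32 key_size,
				  const u8 *key, enum ocs_cipher cipher)
{
	const u32 *key_u32 = (const u32 *)key;
	u32 val;
	int i;

	/* Simplified check: OCS AES takes 16- or 32-byte keys (SM4: 16). */
	if (!key || !(key_size == 16 || key_size == 32)) {
		dev_err(aes_dev->dev,
			"%u-byte key size not supported\n", key_size);
		return -EINVAL;
	}

	/* Program the key, one 32-bit word per AES_KEY register. */
	for (i = 0; i < key_size / sizeof(u32); i++)
		iowrite32(key_u32[i],
			  aes_dev->base_reg + AES_KEY_0_OFFSET +
			  (i * sizeof(u32)));

	/* Record the key width; value names assumed. */
	val = (key_size == 16) ? AES_128_BIT_KEY : AES_256_BIT_KEY;
	iowrite32(val, aes_dev->base_reg + AES_KEY_SIZE_OFFSET);

	return 0;
}
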
524 static inline void set_ocs_aes_command(struct ocs_aes_dev *aes_dev,
556 iowrite32(val, aes_dev->base_reg + AES_COMMAND_OFFSET);
559 static void ocs_aes_init(struct ocs_aes_dev *aes_dev,
565 aes_irq_disable(aes_dev);
568 aes_a_set_endianness(aes_dev);
571 set_ocs_aes_command(aes_dev, cipher, mode, instruction);
578 static inline void ocs_aes_write_last_data_blk_len(struct ocs_aes_dev *aes_dev,
593 iowrite32(val, aes_dev->base_reg + AES_PLEN_OFFSET);
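Lines 559-571 show that ocs_aes_init() is a three-step preamble run before every operation: quiesce interrupts, set the byte-order configuration (the 0x7FF write at line 206), and program the command register. A sketch matching those hits (enum type names assumed from the driver's header):

static void ocs_aes_init(struct ocs_aes_dev *aes_dev,
			 enum ocs_mode mode,
			 enum ocs_cipher cipher,
			 enum ocs_instruction instruction)
{
	/* Ensure interrupts are disabled and pending ones are cleared. */
	aes_irq_disable(aes_dev);

	/* Set the byte-order configuration. */
	aes_a_set_endianness(aes_dev);

	/* Select cipher, mode and encrypt/decrypt instruction. */
	set_ocs_aes_command(aes_dev, cipher, mode, instruction);
}
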
784 * @aes_dev: The OCS AES device to use.
796 int ocs_aes_op(struct ocs_aes_dev *aes_dev,
824 ocs_aes_init(aes_dev, mode, cipher, instruction);
828 ocs_aes_write_last_data_blk_len(aes_dev, src_size);
833 iowrite32(iv32[0], aes_dev->base_reg + AES_IV_0_OFFSET);
834 iowrite32(iv32[1], aes_dev->base_reg + AES_IV_1_OFFSET);
835 iowrite32(iv32[2], aes_dev->base_reg + AES_IV_2_OFFSET);
836 iowrite32(iv32[3], aes_dev->base_reg + AES_IV_3_OFFSET);
840 aes_a_op_trigger(aes_dev);
843 dma_to_ocs_aes_ll(aes_dev, src_dma_list);
844 dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
845 aes_a_dma_active_src_dst_ll_en(aes_dev);
852 aes_a_set_last_gcx(aes_dev);
855 aes_a_op_termination(aes_dev);
859 rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
865 iv32[0] = ioread32(aes_dev->base_reg + AES_IV_0_OFFSET);
866 iv32[1] = ioread32(aes_dev->base_reg + AES_IV_1_OFFSET);
867 iv32[2] = ioread32(aes_dev->base_reg + AES_IV_2_OFFSET);
868 iv32[3] = ioread32(aes_dev->base_reg + AES_IV_3_OFFSET);
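ocs_aes_op() programs the IV into AES_IV_0..3 before triggering (lines 833-836) and reads the updated IV back after completion (lines 865-868), which is what lets chained CBC/CTR requests resume where the hardware stopped. A condensed sketch of the data path; mode-specific branches (such as the set_last_gcx call at line 852) and input validation are omitted:

/* Condensed sketch of the ocs_aes_op() flow; branches omitted. */
static int ocs_aes_op_sketch(struct ocs_aes_dev *aes_dev,
			     enum ocs_mode mode, enum ocs_cipher cipher,
			     enum ocs_instruction instruction,
			     dma_addr_t dst_dma_list, dma_addr_t src_dma_list,
			     u32 src_size, u8 *iv)
{
	u32 *iv32 = (u32 *)iv;
	int rc;

	ocs_aes_init(aes_dev, mode, cipher, instruction);
	ocs_aes_write_last_data_blk_len(aes_dev, src_size);

	/* Program the IV (CBC/CTR), four 32-bit registers. */
	iowrite32(iv32[0], aes_dev->base_reg + AES_IV_0_OFFSET);
	iowrite32(iv32[1], aes_dev->base_reg + AES_IV_1_OFFSET);
	iowrite32(iv32[2], aes_dev->base_reg + AES_IV_2_OFFSET);
	iowrite32(iv32[3], aes_dev->base_reg + AES_IV_3_OFFSET);

	/* Trigger, wire up both DMA chains, mark the end of the data. */
	aes_a_op_trigger(aes_dev);
	dma_to_ocs_aes_ll(aes_dev, src_dma_list);
	dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
	aes_a_dma_active_src_dst_ll_en(aes_dev);
	aes_a_op_termination(aes_dev);

	/* Sleep until completion (or a DMA error). */
	rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
	if (rc)
		return rc;

	/* Read back the updated IV so chained requests can continue. */
	iv32[0] = ioread32(aes_dev->base_reg + AES_IV_0_OFFSET);
	iv32[1] = ioread32(aes_dev->base_reg + AES_IV_1_OFFSET);
	iv32[2] = ioread32(aes_dev->base_reg + AES_IV_2_OFFSET);
	iv32[3] = ioread32(aes_dev->base_reg + AES_IV_3_OFFSET);

	return 0;
}
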
875 static void ocs_aes_gcm_write_j0(const struct ocs_aes_dev *aes_dev,
884 iowrite32(0x00000001, aes_dev->base_reg + AES_IV_0_OFFSET);
885 iowrite32(__swab32(j0[2]), aes_dev->base_reg + AES_IV_1_OFFSET);
886 iowrite32(__swab32(j0[1]), aes_dev->base_reg + AES_IV_2_OFFSET);
887 iowrite32(__swab32(j0[0]), aes_dev->base_reg + AES_IV_3_OFFSET);
891 static inline void ocs_aes_gcm_read_tag(struct ocs_aes_dev *aes_dev,
901 tag_u32[0] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_3_OFFSET));
902 tag_u32[1] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_2_OFFSET));
903 tag_u32[2] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_1_OFFSET));
904 tag_u32[3] = __swab32(ioread32(aes_dev->base_reg + AES_T_MAC_0_OFFSET));
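For GCM with a 96-bit IV, J0 is IV || 0^31 || 1. Lines 884-887 show the counter word 1 going into AES_IV_0 and the three IV words byte-swapped and written in reverse register order; lines 901-904 show the tag coming back out of the T_MAC registers with the mirrored transformation. Reconstructed sketch of the J0 write:

/* Build and write J0 = IV || 0^31 || 1 (96-bit IV case, per the GCM spec). */
static void ocs_aes_gcm_write_j0(const struct ocs_aes_dev *aes_dev,
				 const u8 *iv)
{
	const u32 *j0 = (const u32 *)iv;

	/*
	 * The counter word is 1; the IV words are byte-swapped and written
	 * in reverse register order to match the engine's byte ordering.
	 */
	iowrite32(0x00000001, aes_dev->base_reg + AES_IV_0_OFFSET);
	iowrite32(__swab32(j0[2]), aes_dev->base_reg + AES_IV_1_OFFSET);
	iowrite32(__swab32(j0[1]), aes_dev->base_reg + AES_IV_2_OFFSET);
	iowrite32(__swab32(j0[0]), aes_dev->base_reg + AES_IV_3_OFFSET);
}
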
911 * @aes_dev: The OCS AES device to use.
925 int ocs_aes_gcm_op(struct ocs_aes_dev *aes_dev,
949 ocs_aes_init(aes_dev, OCS_MODE_GCM, cipher, instruction);
952 ocs_aes_gcm_write_j0(aes_dev, iv);
955 iowrite32(tag_size, aes_dev->base_reg + AES_TLEN_OFFSET);
958 ocs_aes_write_last_data_blk_len(aes_dev, src_size);
963 iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_0_OFFSET);
965 iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_1_OFFSET);
970 iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_2_OFFSET);
972 iowrite32(val, aes_dev->base_reg + AES_MULTIPURPOSE2_3_OFFSET);
975 aes_a_op_trigger(aes_dev);
980 dma_to_ocs_aes_ll(aes_dev, aad_dma_list);
981 aes_a_dma_active_src_ll_en(aes_dev);
984 aes_a_set_last_gcx_and_adata(aes_dev);
987 rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT);
991 aes_a_set_last_gcx_and_adata(aes_dev);
995 aes_a_wait_last_gcx(aes_dev);
996 aes_a_dma_wait_input_buffer_occupancy(aes_dev);
1001 dma_to_ocs_aes_ll(aes_dev, src_dma_list);
1002 dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
1003 aes_a_dma_active_src_dst_ll_en(aes_dev);
1005 aes_a_dma_set_xfer_size_zero(aes_dev);
1006 aes_a_dma_active(aes_dev);
1010 aes_a_set_last_gcx(aes_dev);
1013 rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
1017 ocs_aes_gcm_read_tag(aes_dev, out_tag, tag_size);
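Lines 975-1013 outline the GCM data flow: trigger, stream the AAD (if any) over the source linked list, flag the last AAD block, wait for the source DMA, then run the payload phase (a zero-length transfer when src_size is 0), flag the last GCX block, wait for completion, and read the tag. A sketch of the AAD phase as it likely sits inside ocs_aes_gcm_op(), with rc, aad_size and aad_dma_list in scope:

	/* Inside ocs_aes_gcm_op(), after J0 and bit lengths are programmed. */
	aes_a_op_trigger(aes_dev);

	if (aad_size) {
		/* Stream the AAD into the engine via the source chain. */
		dma_to_ocs_aes_ll(aes_dev, aad_dma_list);
		aes_a_dma_active_src_ll_en(aes_dev);

		/* Mark the last AAD block so the engine pads it if needed. */
		aes_a_set_last_gcx_and_adata(aes_dev);

		/* Wait for the source DMA to drain. */
		rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT);
		if (rc)
			return rc;
	} else {
		aes_a_set_last_gcx_and_adata(aes_dev);
	}

	/* Ensure the AAD is fully consumed before the payload phase. */
	aes_a_wait_last_gcx(aes_dev);
	aes_a_dma_wait_input_buffer_occupancy(aes_dev);
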
1023 static void ocs_aes_ccm_write_encrypted_tag(struct ocs_aes_dev *aes_dev,
1029 aes_a_dma_wait_input_buffer_occupancy(aes_dev);
1036 aes_a_dma_reset_and_activate_perf_cntr(aes_dev);
1037 aes_a_dma_wait_and_deactivate_perf_cntr(aes_dev,
1042 iowrite8(in_tag[i], aes_dev->base_reg +
1054 static int ocs_aes_ccm_write_b0(const struct ocs_aes_dev *aes_dev,
1114 iowrite8(b0[i], aes_dev->base_reg +
1126 static void ocs_aes_ccm_write_adata_len(const struct ocs_aes_dev *aes_dev,
1155 aes_dev->base_reg +
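ocs_aes_ccm_write_adata_len() (line 1126) encodes the associated-data length as NIST SP 800-38C prescribes: two big-endian octets for lengths below 2^16 - 2^8, the prefix 0xFF 0xFE plus four octets up to 2^32 - 1, and 0xFF 0xFF plus eight octets above that, after which the octets are pushed into the engine one at a time. The register offset elided at line 1155 is presumably the DMA input write FIFO; the name used below is an assumption. A sketch:

static void ocs_aes_ccm_write_adata_len(const struct ocs_aes_dev *aes_dev,
					u64 adata_len)
{
	u8 enc_a[10]; /* Maximum encoded size: 10 octets. */
	int i, len;

	/* Encode the adata length per NIST SP 800-38C, A.2.2. */
	if (adata_len < 65280) {		/* 2^16 - 2^8 */
		len = 2;
		*(__be16 *)enc_a = cpu_to_be16((u16)adata_len);
	} else if (adata_len <= 0xFFFFFFFF) {
		len = 6;
		*(__be16 *)enc_a = cpu_to_be16(0xfffe);
		*(__be32 *)&enc_a[2] = cpu_to_be32((u32)adata_len);
	} else {
		len = 10;
		*(__be16 *)enc_a = cpu_to_be16(0xffff);
		*(__be64 *)&enc_a[2] = cpu_to_be64(adata_len);
	}

	/* Push the encoded length into the engine's input FIFO. */
	for (i = 0; i < len; i++)
		iowrite8(enc_a[i],
			 aes_dev->base_reg +
			 AES_A_DMA_INBUFFER_WRITE_FIFO_OFFSET); /* assumed name */
}
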
1159 static int ocs_aes_ccm_do_adata(struct ocs_aes_dev *aes_dev,
1166 aes_a_set_last_gcx_and_adata(aes_dev);
1176 ocs_aes_ccm_write_adata_len(aes_dev, adata_size);
1179 dma_to_ocs_aes_ll(aes_dev, adata_dma_list);
1182 aes_a_dma_active_src_ll_en(aes_dev);
1185 aes_a_set_last_gcx_and_adata(aes_dev);
1188 rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT);
1194 aes_a_wait_last_gcx(aes_dev);
1195 aes_a_dma_wait_input_buffer_occupancy(aes_dev);
1200 static int ocs_aes_ccm_encrypt_do_payload(struct ocs_aes_dev *aes_dev,
1210 dma_to_ocs_aes_ll(aes_dev, src_dma_list);
1211 dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
1212 aes_a_dma_active_src_dst_ll_en(aes_dev);
1215 dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
1216 aes_a_dma_active_dst_ll_en(aes_dev);
1223 aes_a_set_last_gcx(aes_dev);
1226 return ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
1229 static int ocs_aes_ccm_decrypt_do_payload(struct ocs_aes_dev *aes_dev,
1236 aes_a_dma_set_xfer_size_zero(aes_dev);
1237 aes_a_dma_active(aes_dev);
1238 aes_a_set_last_gcx(aes_dev);
1247 dma_to_ocs_aes_ll(aes_dev, src_dma_list);
1248 dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
1249 aes_a_dma_active_src_dst_ll_en(aes_dev);
1255 aes_a_set_last_gcx(aes_dev);
1260 return ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT);
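The CCM decrypt payload path (lines 1229-1260) has two cases: an empty payload is handled with a zero-byte transfer (lines 1236-1238) and returns immediately, while a non-empty one streams through both linked lists and waits only for source-done rather than completion, because the encrypted tag still has to be fed to the engine afterwards. Reconstructed sketch:

static int ocs_aes_ccm_decrypt_do_payload(struct ocs_aes_dev *aes_dev,
					  dma_addr_t dst_dma_list,
					  dma_addr_t src_dma_list,
					  u32 src_size)
{
	if (!src_size) {
		/* Nothing to decrypt: run a zero-byte transfer. */
		aes_a_dma_set_xfer_size_zero(aes_dev);
		aes_a_dma_active(aes_dev);
		aes_a_set_last_gcx(aes_dev);

		return 0;
	}

	/* Stream ciphertext in and plaintext out via linked lists. */
	dma_to_ocs_aes_ll(aes_dev, src_dma_list);
	dma_from_ocs_aes_ll(aes_dev, dst_dma_list);
	aes_a_dma_active_src_dst_ll_en(aes_dev);

	/* Flag the last block so the engine pads it if needed. */
	aes_a_set_last_gcx(aes_dev);

	/* Wait for source DMA only; the encrypted tag is written next. */
	return ocs_aes_irq_enable_and_wait(aes_dev, AES_DMA_SRC_DONE_INT);
}
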
1269 static inline int ccm_compare_tag_to_yr(struct ocs_aes_dev *aes_dev,
1278 tag[i] = ioread32(aes_dev->base_reg +
1280 yr[i] = ioread32(aes_dev->base_reg +
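ccm_compare_tag_to_yr() (lines 1269-1280) reads the engine's computed tag from the T_MAC registers and the decrypted reference tag Yr from the register bank elided at line 1280, then compares the first tag_size bytes. A sketch, assuming AES_MAX_TAG_SIZE_U32 (four 32-bit words), AES_MULTIPURPOSE2_0_OFFSET as the Yr location, and -EBADMSG as the mismatch error:

static inline int ccm_compare_tag_to_yr(struct ocs_aes_dev *aes_dev,
					u8 tag_size_bytes)
{
	u32 tag[AES_MAX_TAG_SIZE_U32];	/* assumed constant, 4 words */
	u32 yr[AES_MAX_TAG_SIZE_U32];
	int i;

	/* Read both the computed tag and the expected tag (Yr). */
	for (i = 0; i < AES_MAX_TAG_SIZE_U32; i++) {
		tag[i] = ioread32(aes_dev->base_reg +
				  AES_T_MAC_0_OFFSET + (i * sizeof(u32)));
		yr[i] = ioread32(aes_dev->base_reg +
				 AES_MULTIPURPOSE2_0_OFFSET + /* assumed */
				 (i * sizeof(u32)));
	}

	return memcmp(tag, yr, tag_size_bytes) ? -EBADMSG : 0;
}
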
1290 * @aes_dev: The OCS AES device to use.
1307 int ocs_aes_ccm_op(struct ocs_aes_dev *aes_dev,
1330 ocs_aes_init(aes_dev, OCS_MODE_CCM, cipher, instruction);
1345 aes_dev->base_reg + AES_MULTIPURPOSE1_3_OFFSET);
1347 aes_dev->base_reg + AES_MULTIPURPOSE1_2_OFFSET);
1349 aes_dev->base_reg + AES_MULTIPURPOSE1_1_OFFSET);
1351 aes_dev->base_reg + AES_MULTIPURPOSE1_0_OFFSET);
1354 iowrite32(tag_size, aes_dev->base_reg + AES_TLEN_OFFSET);
1360 ocs_aes_write_last_data_blk_len(aes_dev, src_size);
1363 aes_a_op_trigger(aes_dev);
1365 aes_a_dma_reset_and_activate_perf_cntr(aes_dev);
1368 rc = ocs_aes_ccm_write_b0(aes_dev, iv, adata_size, tag_size, src_size);
1375 aes_a_dma_wait_and_deactivate_perf_cntr(aes_dev,
1379 ocs_aes_ccm_do_adata(aes_dev, adata_dma_list, adata_size);
1383 return ocs_aes_ccm_encrypt_do_payload(aes_dev, dst_dma_list,
1387 rc = ocs_aes_ccm_decrypt_do_payload(aes_dev, dst_dma_list,
1393 ocs_aes_ccm_write_encrypted_tag(aes_dev, in_tag, tag_size);
1394 rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
1398 return ccm_compare_tag_to_yr(aes_dev, tag_size);
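The tail of ocs_aes_ccm_op() (lines 1383-1398) branches on the instruction: encryption ends with the payload transfer, while decryption must also feed the received, still-encrypted tag into the engine, wait for completion, and compare the result against Yr. A sketch of that tail, with in_tag, tag_size and rc in scope and OCS_ENCRYPT assumed as the enum value name:

	if (instruction == OCS_ENCRYPT)	/* value name assumed */
		return ocs_aes_ccm_encrypt_do_payload(aes_dev, dst_dma_list,
						      src_dma_list, src_size);

	/* Decrypt: process the payload, then verify the tag. */
	rc = ocs_aes_ccm_decrypt_do_payload(aes_dev, dst_dma_list,
					    src_dma_list, src_size);
	if (rc)
		return rc;

	/* Feed the received (encrypted) tag and let the engine decrypt it. */
	ocs_aes_ccm_write_encrypted_tag(aes_dev, in_tag, tag_size);
	rc = ocs_aes_irq_enable_and_wait(aes_dev, AES_COMPLETE_INT);
	if (rc)
		return rc;

	return ccm_compare_tag_to_yr(aes_dev, tag_size);
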
1403 * @aes_dev: The OCS AES device the list will be created for.
1417 int ocs_create_linked_list_from_sg(const struct ocs_aes_dev *aes_dev,
1429 if (!dll_desc || !sg || !aes_dev)
1467 dll_desc->vaddr = dma_alloc_coherent(aes_dev->dev, dll_desc->size,
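ocs_create_linked_list_from_sg() validates its arguments (line 1429) and allocates a coherent buffer for the descriptor array (line 1467), then walks the DMA-mapped scatterlist filling one descriptor per segment and terminating the chain at the last one. A sketch of the fill loop under assumed structure: a struct ocs_dma_linked_list with u32 src_addr, src_len, next and ll_flags fields, OCS_LL_DMA_FLAG_TERMINATE as the terminate flag, and dma_nents as the mapped segment count:

	/* After dma_alloc_coherent() has populated dll_desc. */
	struct ocs_dma_linked_list *ll = dll_desc->vaddr; /* layout assumed */
	int i;

	for (i = 0; i < dma_nents; i++, sg = sg_next(sg)) {
		ll[i].src_addr = sg_dma_address(sg) + data_offset;
		ll[i].src_len = (sg_dma_len(sg) - data_offset) < data_size ?
				(sg_dma_len(sg) - data_offset) : data_size;
		data_offset = 0;
		data_size -= ll[i].src_len;
		/* Each entry points at the DMA address of the next one. */
		ll[i].next = dll_desc->dma_addr + (sizeof(*ll) * (i + 1));
		ll[i].ll_flags = 0;
	}
	/* Terminate the chain (dma_nents >= 1 is validated earlier). */
	ll[i - 1].next = 0;
	ll[i - 1].ll_flags = OCS_LL_DMA_FLAG_TERMINATE; /* assumed flag name */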