
Searched refs:ctx (Results 1 - 25 of 358) sorted by relevance


/device/soc/hisilicon/hi3516dv300/sdk_linux/drv/interdrv/common/cipher/src/drv/cipher_v1.0/drivers/
kapi_hash.c 232 static hi_void kapi_hmac_cal_ipad_opad(kapi_hash_ctx *ctx) in kapi_hmac_cal_ipad_opad() argument
236 for (i = 0; i < ctx->func->block_size; i++) { in kapi_hmac_cal_ipad_opad()
237 ctx->hmac_ipad[i] ^= HMAC_IPAD_BYTE; in kapi_hmac_cal_ipad_opad()
238 ctx->hmac_opad[i] ^= HMAC_OPAD_BYTE; in kapi_hmac_cal_ipad_opad()
242 static hi_s32 kapi_hmac_start(kapi_hash_ctx *ctx, const hi_u8 *key, hi_u32 keylen) in kapi_hmac_start() argument
253 if ((memset_s(ctx->hmac_ipad, HASH_BLOCK_SIZE_128, 0x00, ctx->func->block_size) != EOK) || in kapi_hmac_start()
254 (memset_s(ctx->hmac_opad, HASH_BLOCK_SIZE_128, 0x00, ctx->func->block_size) != EOK)) { in kapi_hmac_start()
260 if (keylen <= ctx in kapi_hmac_start()
320 kapi_hmac_finish(kapi_hash_ctx *ctx, hi_u8 *hash, hi_u32 hash_buf_len, hi_u32 *hashlen) kapi_hmac_finish() argument
364 kapi_hash_finsh_calc(kapi_hash_ctx *ctx, hi_u8 *hash, hi_u32 hash_buf_len, hi_u32 *hashlen) kapi_hash_finsh_calc() argument
388 kapi_hash_ctx *ctx = HI_NULL; kapi_hash_chk_ctx() local
408 kapi_hash_set_ctx(kapi_hash_ctx *ctx, hash_mode mode, hi_u32 hmac) kapi_hash_set_ctx() argument
433 kapi_hash_create_calc(kapi_hash_ctx *ctx, hash_mode mode, hi_u32 hmac, const hi_u8 *key, hi_u32 keylen) kapi_hash_create_calc() argument
485 kapi_hash_ctx *ctx = HI_NULL; kapi_hash_start() local
548 kapi_hash_ctx *ctx = HI_NULL; kapi_hash_update() local
589 kapi_hash_ctx *ctx = HI_NULL; kapi_hash_finish() local
641 kapi_hash_ctx *ctx = HI_NULL; kapi_hash_release() local
[all...]
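The kapi_hash.c matches above revolve around HMAC key preparation: the key is copied into ipad/opad buffers and XORed byte-by-byte with 0x36 and 0x5C before the inner and outer hash passes. The following is a minimal generic C sketch of that standard step, not the driver's API; the helper name, the fixed 64-byte block size, and the assumption that the key already fits one block are illustrative choices.

/* Hypothetical sketch of the HMAC ipad/opad preparation seen in
 * kapi_hmac_cal_ipad_opad(); standard algorithm, not the SDK API. */
#include <stdint.h>
#include <string.h>

#define HMAC_BLOCK_SIZE 64   /* SHA-256 block size; SHA-384/512 would use 128 */
#define HMAC_IPAD_BYTE  0x36
#define HMAC_OPAD_BYTE  0x5C

static void hmac_prepare_pads(const uint8_t *key, size_t keylen,
                              uint8_t ipad[HMAC_BLOCK_SIZE],
                              uint8_t opad[HMAC_BLOCK_SIZE])
{
    size_t i;

    /* A key longer than one block would first be hashed down; assumed short here. */
    memset(ipad, 0, HMAC_BLOCK_SIZE);
    memset(opad, 0, HMAC_BLOCK_SIZE);
    memcpy(ipad, key, keylen);
    memcpy(opad, key, keylen);

    for (i = 0; i < HMAC_BLOCK_SIZE; i++) {
        ipad[i] ^= HMAC_IPAD_BYTE;   /* inner pad */
        opad[i] ^= HMAC_OPAD_BYTE;   /* outer pad */
    }
}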
kapi_symc.c 133 kapi_symc_ctx *ctx = HI_NULL; in kapi_symc_release() local
144 ctx = &g_kapi_ctx[i]; in kapi_symc_release()
145 if (ctx->open == HI_TRUE) { in kapi_symc_release()
146 if (memcmp(&owner, &ctx->owner, sizeof(owner)) != 0) { in kapi_symc_release()
167 kapi_symc_ctx *ctx = HI_NULL; in kapi_symc_create() local
183 ctx = &g_kapi_ctx[chn]; in kapi_symc_create()
185 (hi_void)memset_s(ctx, sizeof(kapi_symc_ctx), 0, sizeof(kapi_symc_ctx)); in kapi_symc_create()
186 crypto_get_owner(&ctx->owner); in kapi_symc_create()
189 ctx->open = HI_TRUE; in kapi_symc_create()
190 ctx in kapi_symc_create()
203 kapi_symc_ctx *ctx = HI_NULL; kapi_symc_destroy() local
528 kapi_symc_ctx *ctx = HI_NULL; kapi_symc_chk_cfg() local
568 kapi_symc_set_alg_mode(const kapi_symc_ctx *ctx, const symc_set_cfg_param *set_cfg) kapi_symc_set_alg_mode() argument
576 kapi_symc_set_key(const symc_cfg_t *cfg, const kapi_symc_ctx *ctx, symc_set_cfg_param *set_cfg) kapi_symc_set_key() argument
612 kapi_symc_cpy_key_iv(const symc_cfg_t *cfg, kapi_symc_ctx *ctx, const symc_set_cfg_param *set_cfg) kapi_symc_cpy_key_iv() argument
642 kapi_symc_cfg_set_param(const symc_cfg_t *cfg, kapi_symc_ctx *ctx, symc_set_cfg_param *set_cfg) kapi_symc_cfg_set_param() argument
701 kapi_symc_ctx *ctx = HI_NULL; kapi_symc_cfg() local
747 kapi_symc_ctx *ctx = HI_NULL; kapi_symc_get_cfg() local
790 kapi_symc_ctx *ctx = HI_NULL; kapi_symc_crypto() local
963 kapi_symc_crypto_multi_start(const kapi_symc_ctx *ctx, const hi_cipher_data *pkg, hi_u32 pkg_num, hi_u32 operation, hi_u32 wait) kapi_symc_crypto_multi_start() argument
1028 kapi_symc_ctx *ctx = HI_NULL; kapi_symc_crypto_multi() local
1065 kapi_symc_ctx *ctx = HI_NULL; kapi_aead_get_tag() local
[all...]
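The kapi_symc.c hits show the symmetric-cipher channel bookkeeping: each slot of g_kapi_ctx is zeroed on create, stamped with the creating owner via crypto_get_owner(), marked open, and released only when the caller's owner matches. Below is a generic sketch of that slot-ownership pattern; every type and name in it is hypothetical, not the SDK's.

/* Illustrative "channel slot with owner" pattern; names are made up. */
#include <string.h>

#define MAX_CHN 8

typedef struct { int pid; } owner_t;

typedef struct {
    int     open;
    owner_t owner;
    /* ... per-channel cipher state would live here ... */
} symc_slot;

static symc_slot g_slots[MAX_CHN];

static int slot_create(int chn, owner_t who)
{
    symc_slot *s = &g_slots[chn];

    memset(s, 0, sizeof(*s));   /* start from a clean context */
    s->owner = who;             /* remember who created the channel */
    s->open = 1;
    return 0;
}

static int slot_release(int chn, owner_t who)
{
    symc_slot *s = &g_slots[chn];

    if (!s->open || memcmp(&who, &s->owner, sizeof(who)) != 0) {
        return -1;              /* only the creating owner may release it */
    }
    s->open = 0;
    return 0;
}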
/device/soc/hisilicon/hi3861v100/sdk_liteos/boot/loaderboot/common/
transfer.c 231 upload_context *ctx = g_upload_ctx; in upload_send_file_info() local
234 hi_s32 temp_length = (hi_s32)ctx->file_length; in upload_send_file_info()
241 if (ctx->retry > 0) { in upload_send_file_info()
242 serial_put_buf((const char *)ctx->buffer, SOH_MSG_TOTAL_LEN); in upload_send_file_info()
246 hi_u32 check_sum_val = (uintptr_t)(ctx->buffer) ^ (hi_u32)sizeof(ctx->buffer) ^ 0 ^ (hi_u32)sizeof(ctx->buffer); in upload_send_file_info()
247 (hi_void) memset_s(ctx->buffer, sizeof(ctx->buffer), 0, sizeof(ctx in upload_send_file_info()
285 upload_context *ctx = g_upload_ctx; upload_send_null_info() local
304 upload_context *ctx = g_upload_ctx; upload_send_data() local
420 upload_context *ctx = g_upload_ctx; upload_serial_ymodem() local
[all...]
/device/soc/hisilicon/hi3861v100/sdk_liteos/third_party/mbedtls/include/mbedtls/
cipher.h 382 * \param ctx The context to be initialized. This must not be \c NULL.
384 void mbedtls_cipher_init( mbedtls_cipher_context_t *ctx );
388 * context of \p ctx. Freeing \p ctx itself remains the
391 * \param ctx The context to be freed. If this is \c NULL, the
395 void mbedtls_cipher_free( mbedtls_cipher_context_t *ctx );
403 * \param ctx The context to initialize. This must be initialized.
416 int mbedtls_cipher_setup( mbedtls_cipher_context_t *ctx,
422 * \param ctx The context of the cipher. This must be initialized.
425 * \return \c 0 if \p ctx ha
[all...]
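These cipher.h fragments document the generic cipher wrapper's lifecycle: mbedtls_cipher_init() before anything else, mbedtls_cipher_setup() to bind the cipher info, then key/IV configuration, and mbedtls_cipher_free() at the end. A minimal sketch of that flow, assuming an AES-128-CBC enabled build and with error handling abbreviated:

#include "mbedtls/cipher.h"

/* Minimal mbedtls_cipher lifecycle: init -> setup -> setkey -> free. */
int cipher_lifecycle_demo(void)
{
    mbedtls_cipher_context_t ctx;
    const mbedtls_cipher_info_t *info;
    const unsigned char key[16] = { 0 };    /* placeholder key */
    int ret = -1;

    mbedtls_cipher_init(&ctx);              /* must precede any other call */

    info = mbedtls_cipher_info_from_type(MBEDTLS_CIPHER_AES_128_CBC);
    if (info != NULL) {
        ret = mbedtls_cipher_setup(&ctx, info);     /* bind context to the cipher */
    }
    if (ret == 0) {
        ret = mbedtls_cipher_setkey(&ctx, key, 128, MBEDTLS_ENCRYPT);
    }

    mbedtls_cipher_free(&ctx);              /* safe to call after init */
    return ret;
}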
pk.h 180 typedef int (*mbedtls_pk_rsa_alt_decrypt_func)( void *ctx, int mode, size_t *olen,
183 typedef int (*mbedtls_pk_rsa_alt_sign_func)( void *ctx,
187 typedef size_t (*mbedtls_pk_rsa_alt_key_len_func)( void *ctx );
202 * \param ctx The context to initialize.
205 void mbedtls_pk_init( mbedtls_pk_context *ctx );
210 * \param ctx The context to clear. It must have been initialized.
213 void mbedtls_pk_free( mbedtls_pk_context *ctx );
219 * \param ctx The context to initialize.
222 void mbedtls_pk_restart_init( mbedtls_pk_restart_ctx *ctx );
227 * \param ctx Th
[all...]
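pk.h pairs the basic pk context lifecycle (mbedtls_pk_init()/mbedtls_pk_free(), plus the restartable variant) with the RSA-ALT callback typedefs used to delegate private-key operations to external hardware. The sketch below only exercises the plain lifecycle with a software RSA backend; hooking up the RSA-ALT callbacks would go through mbedtls_pk_setup_rsa_alt() instead.

#include "mbedtls/pk.h"

/* Minimal mbedtls_pk lifecycle: init -> setup(RSA) -> free. */
int pk_lifecycle_demo(void)
{
    mbedtls_pk_context pk;
    int ret;

    mbedtls_pk_init(&pk);       /* context starts empty */
    ret = mbedtls_pk_setup(&pk, mbedtls_pk_info_from_type(MBEDTLS_PK_RSA));
    /* ... key material would be loaded or generated here ... */
    mbedtls_pk_free(&pk);       /* releases whatever the context wraps */
    return ret;
}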
rsa.h 161 * \param ctx The RSA context to initialize. This must not be \c NULL.
168 void mbedtls_rsa_init( mbedtls_rsa_context *ctx,
191 * \param ctx The initialized RSA context to store the parameters in.
201 int mbedtls_rsa_import( mbedtls_rsa_context *ctx,
225 * \param ctx The initialized RSA context to store the parameters in.
240 int mbedtls_rsa_import_raw( mbedtls_rsa_context *ctx,
272 * \param ctx The initialized RSA context holding imported parameters.
279 int mbedtls_rsa_complete( mbedtls_rsa_context *ctx );
302 * \param ctx The initialized RSA context.
321 int mbedtls_rsa_export( const mbedtls_rsa_context *ctx,
[all...]
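rsa.h describes the import workflow: initialize the context, hand over parameters with mbedtls_rsa_import() or mbedtls_rsa_import_raw(), then call mbedtls_rsa_complete() so the library derives whatever is missing before the key is used or exported. A compressed sketch, assuming the mbedtls 2.x two-extra-argument form of mbedtls_rsa_init() shown above and a caller that supplies N, E and D as MPIs:

#include "mbedtls/rsa.h"

/* Import -> complete flow from rsa.h (mbedtls 2.x API); P and Q omitted. */
int rsa_import_demo(const mbedtls_mpi *N, const mbedtls_mpi *E, const mbedtls_mpi *D)
{
    mbedtls_rsa_context rsa;
    int ret;

    mbedtls_rsa_init(&rsa, MBEDTLS_RSA_PKCS_V15, 0);

    ret = mbedtls_rsa_import(&rsa, N, NULL, NULL, D, E);
    if (ret == 0) {
        ret = mbedtls_rsa_complete(&rsa);   /* derive missing factors/CRT values */
    }

    mbedtls_rsa_free(&rsa);
    return ret;
}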
aes.h 124 * \param ctx The AES context to initialize. This must not be \c NULL.
126 void mbedtls_aes_init( mbedtls_aes_context *ctx );
131 * \param ctx The AES context to clear.
135 void mbedtls_aes_free( mbedtls_aes_context *ctx );
144 * \param ctx The AES XTS context to initialize. This must not be \c NULL.
146 void mbedtls_aes_xts_init( mbedtls_aes_xts_context *ctx );
151 * \param ctx The AES XTS context to clear.
155 void mbedtls_aes_xts_free( mbedtls_aes_xts_context *ctx );
161 * \param ctx The AES context to which the key should be bound.
173 int mbedtls_aes_setkey_enc( mbedtls_aes_context *ctx, cons
[all...]
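aes.h covers the raw AES context (and its XTS sibling): init, bind a key with mbedtls_aes_setkey_enc()/_dec(), run a block or mode function, free. A single-block ECB sketch using only those documented calls:

#include "mbedtls/aes.h"

/* One AES-128-ECB block, encrypt direction. */
int aes_ecb_demo(const unsigned char key[16],
                 const unsigned char in[16], unsigned char out[16])
{
    mbedtls_aes_context aes;
    int ret;

    mbedtls_aes_init(&aes);
    ret = mbedtls_aes_setkey_enc(&aes, key, 128);   /* key length in bits */
    if (ret == 0) {
        ret = mbedtls_aes_crypt_ecb(&aes, MBEDTLS_AES_ENCRYPT, in, out);
    }
    mbedtls_aes_free(&aes);
    return ret;
}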
des.h 88 * \param ctx DES context to be initialized
94 void mbedtls_des_init( mbedtls_des_context *ctx );
99 * \param ctx DES context to be cleared
105 void mbedtls_des_free( mbedtls_des_context *ctx );
110 * \param ctx DES3 context to be initialized
112 void mbedtls_des3_init( mbedtls_des3_context *ctx );
117 * \param ctx DES3 context to be cleared
119 void mbedtls_des3_free( mbedtls_des3_context *ctx );
167 * \param ctx DES context to be initialized
176 int mbedtls_des_setkey_enc( mbedtls_des_context *ctx, cons
[all...]
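des.h follows the same shape for single DES and 3DES contexts. A minimal single-block sketch (DES survives here only for legacy interoperability; new designs should use AES):

#include "mbedtls/des.h"

/* One DES-ECB block, encrypt direction. */
int des_ecb_demo(const unsigned char key[MBEDTLS_DES_KEY_SIZE],
                 const unsigned char in[8], unsigned char out[8])
{
    mbedtls_des_context des;
    int ret;

    mbedtls_des_init(&des);
    ret = mbedtls_des_setkey_enc(&des, key);
    if (ret == 0) {
        ret = mbedtls_des_crypt_ecb(&des, in, out);
    }
    mbedtls_des_free(&des);
    return ret;
}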
/device/soc/rockchip/rk3568/hardware/mpp/mpp/legacy/
vpu_api.cpp 37 vpu_api_init(VpuCodecContext *ctx, RK_U8 *extraData, RK_U32 extra_size) in vpu_api_init() argument
40 if (ctx == nullptr) { in vpu_api_init()
44 VpuApiLegacy* api = (VpuApiLegacy*)(ctx->vpuApiObj); in vpu_api_init()
50 return api->init(ctx, extraData, extra_size); in vpu_api_init()
54 vpu_api_decode(VpuCodecContext *ctx, VideoPacket_t *pkt, DecoderOut_t *aDecOut) in vpu_api_decode() argument
56 if (ctx == nullptr) { in vpu_api_decode()
61 VpuApiLegacy* api = (VpuApiLegacy*)(ctx->vpuApiObj); in vpu_api_decode()
67 return api->decode(ctx, pkt, aDecOut); in vpu_api_decode()
69 static RK_S32 vpu_api_sendstream(VpuCodecContext *ctx, VideoPacket_t *pkt) in vpu_api_sendstream() argument
71 if (ctx in vpu_api_sendstream()
85 vpu_api_getframe(VpuCodecContext *ctx, DecoderOut_t *aDecOut) vpu_api_getframe() argument
102 vpu_api_sendframe(VpuCodecContext *ctx, EncInputStream_t *aEncInStrm) vpu_api_sendframe() argument
118 vpu_api_getstream(VpuCodecContext *ctx, EncoderOut_t *aEncOut) vpu_api_getstream() argument
135 vpu_api_encode(VpuCodecContext *ctx, EncInputStream_t *aEncInStrm, EncoderOut_t *aEncOut) vpu_api_encode() argument
152 vpu_api_flush(VpuCodecContext *ctx) vpu_api_flush() argument
169 vpu_api_control(VpuCodecContext *ctx, VPU_API_CMD cmdType, void *param) vpu_api_control() argument
248 open_orign_vpu(VpuCodecContext **ctx) open_orign_vpu() argument
256 close_orign_vpu(VpuCodecContext **ctx) close_orign_vpu() argument
268 vpu_open_context(VpuCodecContext **ctx) vpu_open_context() argument
419 vpu_close_context(VpuCodecContext **ctx) vpu_close_context() argument
[all...]
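vpu_api.cpp routes every VpuCodecContext function pointer (init, decode, the sendstream/getframe split, encode, flush, control) through thin wrappers around the VpuApiLegacy object, and vpu_open_context()/vpu_close_context() create and tear the context down. A rough lifecycle sketch follows; the header name is assumed and the codec-specific field setup before init() is omitted.

#include "vpu_api.h"   /* assumed header declaring VpuCodecContext */

/* Open/close lifecycle around the wrappers listed above. */
int vpu_context_demo(void)
{
    VpuCodecContext *ctx = NULL;

    if (vpu_open_context(&ctx) != 0 || ctx == NULL) {
        return -1;      /* allocation or probing failed */
    }

    /* ... set codec type and coding parameters, then ctx->init(ctx, ...),
     *     ctx->decode(ctx, ...) or the send/get pair, ctx->flush(ctx) ... */

    vpu_close_context(&ctx);    /* frees the context and clears the pointer */
    return 0;
}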
/device/soc/hisilicon/hi3516dv300/sdk_linux/drv/interdrv/common/cipher/src/drv/cipher_v1.0/drivers/extend/
ext_sm3.c 171 static hi_void sm3_init(ext_sm3_context *ctx) in sm3_init() argument
175 ctx->state[WORD_IDX_0] = SM3_H0; in sm3_init()
176 ctx->state[WORD_IDX_1] = SM3_H1; in sm3_init()
177 ctx->state[WORD_IDX_2] = SM3_H2; in sm3_init()
178 ctx->state[WORD_IDX_3] = SM3_H3; in sm3_init()
179 ctx->state[WORD_IDX_4] = SM3_H4; in sm3_init()
180 ctx->state[WORD_IDX_5] = SM3_H5; in sm3_init()
181 ctx->state[WORD_IDX_6] = SM3_H6; in sm3_init()
182 ctx->state[WORD_IDX_7] = SM3_H7; in sm3_init()
189 static hi_s32 sm3_update(ext_sm3_context *ctx, cons argument
247 sm3_final(ext_sm3_context *ctx, hi_u8 *digest, hi_u32 digest_len) sm3_final() argument
305 ext_sm3_context *ctx = HI_NULL; ext_sm3_create() local
323 ext_sm3_update(hi_void *ctx, const hi_u8 *chunk, hi_u32 chunk_len, hash_chunk_src src) ext_sm3_update() argument
376 ext_sm3_finish(hi_void *ctx, hi_void *hash, hi_u32 hash_buf_len, hi_u32 *hashlen) ext_sm3_finish() argument
394 ext_sm3_destory(hi_void *ctx) ext_sm3_destory() argument
[all...]
ext_hash.c 32 mbedtls_md_context_t *ctx = HI_NULL; in mbedtls_hash_create() local
46 ctx = crypto_malloc(sizeof(*ctx)); in mbedtls_hash_create()
47 if (ctx == HI_NULL) { in mbedtls_hash_create()
52 (hi_void)memset_s(ctx, sizeof(mbedtls_md_context_t), 0, sizeof(mbedtls_md_context_t)); in mbedtls_hash_create()
54 mbedtls_md_init(ctx); in mbedtls_hash_create()
55 mbedtls_md_setup(ctx, info, HI_FALSE); in mbedtls_hash_create()
56 mbedtls_md_starts(ctx); in mbedtls_hash_create()
60 return ctx; in mbedtls_hash_create()
107 hi_s32 mbedtls_hash_update(hi_void *ctx, cons argument
139 mbedtls_hash_finish(hi_void *ctx, hi_void *hash, hi_u32 hash_buf_len, hi_u32 *hashlen) mbedtls_hash_finish() argument
156 mbedtls_hash_destory(hi_void *ctx) mbedtls_hash_destory() argument
[all...]
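ext_hash.c is a thin adapter over the mbedtls message-digest API: it allocates an mbedtls_md_context_t, then drives mbedtls_md_init/setup/starts and later the update/finish calls. The same sequence in plain mbedtls terms, assuming SHA-256 is the selected digest:

#include "mbedtls/md.h"

/* Plain mbedtls_md sequence mirrored by mbedtls_hash_create/update/finish. */
int md_sha256_demo(const unsigned char *msg, size_t len, unsigned char out[32])
{
    mbedtls_md_context_t md;
    const mbedtls_md_info_t *info = mbedtls_md_info_from_type(MBEDTLS_MD_SHA256);
    int ret = -1;

    if (info == NULL) {
        return ret;
    }

    mbedtls_md_init(&md);
    ret = mbedtls_md_setup(&md, info, 0);   /* 0: no HMAC state needed */
    if (ret == 0) {
        ret = mbedtls_md_starts(&md);
    }
    if (ret == 0) {
        ret = mbedtls_md_update(&md, msg, len);
    }
    if (ret == 0) {
        ret = mbedtls_md_finish(&md, out);
    }
    mbedtls_md_free(&md);
    return ret;
}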
/device/soc/hisilicon/hi3516dv300/sdk_linux/drv/interdrv/common/cipher/src/drv/cipher_v1.0/drivers/extend/include/
ext_alg.h 40 hi_s32 ext_mbedtls_aead_destory(hi_void *ctx);
51 hi_s32 ext_mbedtls_aead_setiv(hi_void *ctx, const hi_u8 *iv, hi_u32 ivlen, hi_u32 usage);
56 * param ctx SYMC handle
62 hi_s32 ext_mbedtls_aead_setkey(hi_void *ctx, const hi_u8 *fkey, const hi_u8 *skey, hi_u32 *hisi_klen);
67 * param ctx SYMC handle
74 hi_s32 ext_mbedtls_aead_set_aad(hi_void *ctx, compat_addr aad, hi_u32 alen, hi_u32 tlen);
83 hi_s32 ext_mbedtls_aead_get_tag(hi_void *ctx, hi_u32 tag[AEAD_TAG_SIZE_IN_WORD], hi_u32 *taglen);
91 * param ctx symc ctx.
98 hi_s32 ext_mbedtls_aead_ccm_crypto(hi_void *ctx, hi_u3
[all...]
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_event.c 44 int kbase_event_pending(struct kbase_context *ctx) in kbase_event_pending() argument
46 KBASE_DEBUG_ASSERT(ctx); in kbase_event_pending()
48 return (atomic_read(&ctx->event_count) != 0) || (atomic_read(&ctx->event_closed) != 0); in kbase_event_pending()
53 int kbase_event_dequeue(struct kbase_context *ctx, struct base_jd_event_v2 *uevent) in kbase_event_dequeue() argument
57 KBASE_DEBUG_ASSERT(ctx); in kbase_event_dequeue()
59 mutex_lock(&ctx->event_mutex); in kbase_event_dequeue()
61 if (list_empty(&ctx->event_list)) { in kbase_event_dequeue()
62 if (!atomic_read(&ctx->event_closed)) { in kbase_event_dequeue()
63 mutex_unlock(&ctx in kbase_event_dequeue()
162 kbase_event_post(struct kbase_context *ctx, struct kbase_jd_atom *event) kbase_event_post() argument
[all...]
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_event.c 48 int kbase_event_pending(struct kbase_context *ctx) in kbase_event_pending() argument
50 KBASE_DEBUG_ASSERT(ctx); in kbase_event_pending()
52 return (atomic_read(&ctx->event_count) != 0) || in kbase_event_pending()
53 (atomic_read(&ctx->event_closed) != 0); in kbase_event_pending()
58 int kbase_event_dequeue(struct kbase_context *ctx, struct base_jd_event_v2 *uevent) in kbase_event_dequeue() argument
62 KBASE_DEBUG_ASSERT(ctx); in kbase_event_dequeue()
64 mutex_lock(&ctx->event_mutex); in kbase_event_dequeue()
66 if (list_empty(&ctx->event_list)) { in kbase_event_dequeue()
67 if (!atomic_read(&ctx->event_closed)) { in kbase_event_dequeue()
68 mutex_unlock(&ctx in kbase_event_dequeue()
168 kbase_event_post(struct kbase_context *ctx, struct kbase_jd_atom *atom) kbase_event_post() argument
[all...]
/device/soc/rockchip/rk3399/hardware/mpp/include/
rk_mpi.h 86 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
93 MPP_RET (*decode)(MppCtx ctx, MppPacket packet, MppFrame *frame);
96 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
102 MPP_RET (*decode_put_packet)(MppCtx ctx, MppPacket packet);
105 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
111 MPP_RET (*decode_get_frame)(MppCtx ctx, MppFrame *frame);
115 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
122 MPP_RET (*encode)(MppCtx ctx, MppFrame frame, MppPacket *packet);
125 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
131 MPP_RET (*encode_put_frame)(MppCtx ctx, MppFram
[all...]
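rk_mpi.h exposes the MPP decode path both as a blocking decode() and as the split decode_put_packet()/decode_get_frame() pair, always on a context created by mpp_create() and set up by mpp_init(). A hedged sketch of the split flow is below; the H.264 coding type, the extra include, and the single put/get round-trip are illustrative choices, not requirements of the header.

#include "rk_mpi.h"
#include "mpp_frame.h"    /* MppFrame helpers; include path assumed */

/* Illustrative MPP decode round-trip using the split put-packet/get-frame calls. */
int mpp_decode_demo(MppPacket packet)
{
    MppCtx ctx = NULL;
    MppApi *mpi = NULL;
    MppFrame frame = NULL;
    MPP_RET ret;

    ret = mpp_create(&ctx, &mpi);       /* allocate the context and API table */
    if (ret != MPP_OK) {
        return -1;
    }

    ret = mpp_init(ctx, MPP_CTX_DEC, MPP_VIDEO_CodingAVC);  /* decoder, H.264 assumed */
    if (ret == MPP_OK) {
        ret = mpi->decode_put_packet(ctx, packet);  /* feed one bitstream packet */
    }
    if (ret == MPP_OK) {
        ret = mpi->decode_get_frame(ctx, &frame);   /* frame may stay NULL until ready */
        if (frame != NULL) {
            mpp_frame_deinit(&frame);
        }
    }

    mpp_destroy(ctx);
    return (ret == MPP_OK) ? 0 : -1;
}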
/device/soc/rockchip/rk3568/hardware/mpp/include/
rk_mpi.h 86 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
93 MPP_RET (*decode)(MppCtx ctx, MppPacket packet, MppFrame *frame);
96 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
102 MPP_RET (*decode_put_packet)(MppCtx ctx, MppPacket packet);
105 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
111 MPP_RET (*decode_get_frame)(MppCtx ctx, MppFrame *frame);
115 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
122 MPP_RET (*encode)(MppCtx ctx, MppFrame frame, MppPacket *packet);
125 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
131 MPP_RET (*encode_put_frame)(MppCtx ctx, MppFram
[all...]
/device/soc/rockchip/common/hardware/mpp/include/
rk_mpi.h 86 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
93 MPP_RET (*decode)(MppCtx ctx, MppPacket packet, MppFrame *frame);
96 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
102 MPP_RET (*decode_put_packet)(MppCtx ctx, MppPacket packet);
105 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
111 MPP_RET (*decode_get_frame)(MppCtx ctx, MppFrame *frame);
115 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
122 MPP_RET (*encode)(MppCtx ctx, MppFrame frame, MppPacket *packet);
125 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
131 MPP_RET (*encode_put_frame)(MppCtx ctx, MppFram
[all...]
mpp_device.h 76 MPP_RET (*init)(void *ctx, MppClientType type);
77 MPP_RET (*deinit)(void *ctx);
80 MPP_RET (*attach)(void *ctx);
81 MPP_RET (*detach)(void *ctx);
84 MPP_RET (*reg_wr)(void *ctx, MppDevRegWrCfg *cfg);
85 MPP_RET (*reg_rd)(void *ctx, MppDevRegRdCfg *cfg);
86 MPP_RET (*reg_offset)(void *ctx, MppDevRegOffsetCfg *cfg);
87 MPP_RET (*rcb_info)(void *ctx, MppDevRcbInfoCfg *cfg);
88 MPP_RET (*set_info)(void *ctx, MppDevInfoCfg *cfg);
91 MPP_RET (*cmd_send)(void *ctx);
[all...]
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_event.c 51 int kbase_event_dequeue(struct kbase_context *ctx, struct base_jd_event_v2 *uevent) in kbase_event_dequeue() argument
55 KBASE_DEBUG_ASSERT(ctx); in kbase_event_dequeue()
57 mutex_lock(&ctx->event_mutex); in kbase_event_dequeue()
59 if (list_empty(&ctx->event_list)) { in kbase_event_dequeue()
60 if (!atomic_read(&ctx->event_closed)) { in kbase_event_dequeue()
61 mutex_unlock(&ctx->event_mutex); in kbase_event_dequeue()
66 mutex_unlock(&ctx->event_mutex); in kbase_event_dequeue()
69 dev_dbg(ctx->kbdev->dev, in kbase_event_dequeue()
76 atomic_dec(&ctx->event_count); in kbase_event_dequeue()
77 atom = list_entry(ctx in kbase_event_dequeue()
163 kbase_event_post(struct kbase_context *ctx, struct kbase_jd_atom *atom) kbase_event_post() argument
[all...]
mali_kbase_hwcnt_backend_csf_if_fw.c 94 struct kbase_hwcnt_backend_csf_if_ctx *ctx) in kbasep_hwcnt_backend_csf_if_fw_assert_lock_held()
99 WARN_ON(!ctx); in kbasep_hwcnt_backend_csf_if_fw_assert_lock_held()
101 fw_ctx = (struct kbase_hwcnt_backend_csf_if_fw_ctx *)ctx; in kbasep_hwcnt_backend_csf_if_fw_assert_lock_held()
108 kbasep_hwcnt_backend_csf_if_fw_lock(struct kbase_hwcnt_backend_csf_if_ctx *ctx, in kbasep_hwcnt_backend_csf_if_fw_lock() argument
114 WARN_ON(!ctx); in kbasep_hwcnt_backend_csf_if_fw_lock()
116 fw_ctx = (struct kbase_hwcnt_backend_csf_if_fw_ctx *)ctx; in kbasep_hwcnt_backend_csf_if_fw_lock()
123 struct kbase_hwcnt_backend_csf_if_ctx *ctx, unsigned long flags) in kbasep_hwcnt_backend_csf_if_fw_unlock()
128 WARN_ON(!ctx); in kbasep_hwcnt_backend_csf_if_fw_unlock()
130 fw_ctx = (struct kbase_hwcnt_backend_csf_if_fw_ctx *)ctx; in kbasep_hwcnt_backend_csf_if_fw_unlock()
220 struct kbase_hwcnt_backend_csf_if_ctx *ctx, in kbasep_hwcnt_backend_csf_if_fw_get_prfcnt_info()
93 kbasep_hwcnt_backend_csf_if_fw_assert_lock_held( struct kbase_hwcnt_backend_csf_if_ctx *ctx) kbasep_hwcnt_backend_csf_if_fw_assert_lock_held() argument
122 kbasep_hwcnt_backend_csf_if_fw_unlock( struct kbase_hwcnt_backend_csf_if_ctx *ctx, unsigned long flags) kbasep_hwcnt_backend_csf_if_fw_unlock() argument
219 kbasep_hwcnt_backend_csf_if_fw_get_prfcnt_info( struct kbase_hwcnt_backend_csf_if_ctx *ctx, struct kbase_hwcnt_backend_csf_if_prfcnt_info *prfcnt_info) kbasep_hwcnt_backend_csf_if_fw_get_prfcnt_info() argument
290 kbasep_hwcnt_backend_csf_if_fw_ring_buf_alloc( struct kbase_hwcnt_backend_csf_if_ctx *ctx, u32 buf_count, void **cpu_dump_base, struct kbase_hwcnt_backend_csf_if_ring_buf **out_ring_buf) kbasep_hwcnt_backend_csf_if_fw_ring_buf_alloc() argument
405 kbasep_hwcnt_backend_csf_if_fw_ring_buf_sync( struct kbase_hwcnt_backend_csf_if_ctx *ctx, struct kbase_hwcnt_backend_csf_if_ring_buf *ring_buf, u32 buf_index_first, u32 buf_index_last, bool for_cpu) kbasep_hwcnt_backend_csf_if_fw_ring_buf_sync() argument
481 kbasep_hwcnt_backend_csf_if_fw_timestamp_ns( struct kbase_hwcnt_backend_csf_if_ctx *ctx) kbasep_hwcnt_backend_csf_if_fw_timestamp_ns() argument
488 kbasep_hwcnt_backend_csf_if_fw_ring_buf_free( struct kbase_hwcnt_backend_csf_if_ctx *ctx, struct kbase_hwcnt_backend_csf_if_ring_buf *ring_buf) kbasep_hwcnt_backend_csf_if_fw_ring_buf_free() argument
521 kbasep_hwcnt_backend_csf_if_fw_dump_enable( struct kbase_hwcnt_backend_csf_if_ctx *ctx, struct kbase_hwcnt_backend_csf_if_ring_buf *ring_buf, struct kbase_hwcnt_backend_csf_if_enable *enable) kbasep_hwcnt_backend_csf_if_fw_dump_enable() argument
604 kbasep_hwcnt_backend_csf_if_fw_dump_disable( struct kbase_hwcnt_backend_csf_if_ctx *ctx) kbasep_hwcnt_backend_csf_if_fw_dump_disable() argument
643 kbasep_hwcnt_backend_csf_if_fw_dump_request( struct kbase_hwcnt_backend_csf_if_ctx *ctx) kbasep_hwcnt_backend_csf_if_fw_dump_request() argument
667 kbasep_hwcnt_backend_csf_if_fw_get_indexes( struct kbase_hwcnt_backend_csf_if_ctx *ctx, u32 *extract_index, u32 *insert_index) kbasep_hwcnt_backend_csf_if_fw_get_indexes() argument
685 kbasep_hwcnt_backend_csf_if_fw_set_extract_index( struct kbase_hwcnt_backend_csf_if_ctx *ctx, u32 extract_idx) kbasep_hwcnt_backend_csf_if_fw_set_extract_index() argument
701 kbasep_hwcnt_backend_csf_if_fw_get_gpu_cycle_count( struct kbase_hwcnt_backend_csf_if_ctx *ctx, u64 *cycle_counts, u64 clk_enable_map) kbasep_hwcnt_backend_csf_if_fw_get_gpu_cycle_count() argument
757 struct kbase_hwcnt_backend_csf_if_fw_ctx *ctx = NULL; kbasep_hwcnt_backend_csf_if_fw_ctx_create() local
803 struct kbase_hwcnt_backend_csf_if_fw_ctx *ctx = NULL; kbase_hwcnt_backend_csf_if_fw_create() local
[all...]
mali_kbase_hwcnt_backend_csf_if.h 80 * @ctx: Non-NULL pointer to a CSF context.
83 struct kbase_hwcnt_backend_csf_if_ctx *ctx);
88 * @ctx: Non-NULL pointer to a CSF context.
93 struct kbase_hwcnt_backend_csf_if_ctx *ctx,
99 * @ctx: Non-NULL pointer to a CSF context.
104 struct kbase_hwcnt_backend_csf_if_ctx *ctx,
110 * @ctx: Non-NULL pointer to a CSF context.
115 struct kbase_hwcnt_backend_csf_if_ctx *ctx,
121 * @ctx: Non-NULL pointer to a CSF context.
135 struct kbase_hwcnt_backend_csf_if_ctx *ctx, u3
286 struct kbase_hwcnt_backend_csf_if_ctx *ctx; global() member
[all...]
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_event.c 52 int kbase_event_dequeue(struct kbase_context *ctx, struct base_jd_event_v2 *uevent) in kbase_event_dequeue() argument
56 KBASE_DEBUG_ASSERT(ctx); in kbase_event_dequeue()
58 mutex_lock(&ctx->event_mutex); in kbase_event_dequeue()
60 if (list_empty(&ctx->event_list)) { in kbase_event_dequeue()
61 if (!atomic_read(&ctx->event_closed)) { in kbase_event_dequeue()
62 mutex_unlock(&ctx->event_mutex); in kbase_event_dequeue()
67 mutex_unlock(&ctx->event_mutex); in kbase_event_dequeue()
70 dev_dbg(ctx->kbdev->dev, "event system closed, returning BASE_JD_EVENT_DRV_TERMINATED(0x%X)\n", in kbase_event_dequeue()
76 atomic_dec(&ctx->event_count); in kbase_event_dequeue()
77 atom = list_entry(ctx in kbase_event_dequeue()
162 kbase_event_post(struct kbase_context *ctx, struct kbase_jd_atom *atom) kbase_event_post() argument
[all...]
/device/soc/rockchip/rk3588/hardware/mpp/include/
rk_mpi.h 87 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
94 MPP_RET (*decode)(MppCtx ctx, MppPacket packet, MppFrame *frame);
97 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
103 MPP_RET (*decode_put_packet)(MppCtx ctx, MppPacket packet);
106 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
112 MPP_RET (*decode_get_frame)(MppCtx ctx, MppFrame *frame);
116 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
123 MPP_RET (*encode)(MppCtx ctx, MppFrame frame, MppPacket *packet);
126 * @param[in] ctx The context of mpp, created by mpp_create() and initiated
132 MPP_RET (*encode_put_frame)(MppCtx ctx, MppFram
[all...]
/device/soc/rockchip/rk3568/hardware/mpp/mpp/legacy/inc/
mpp_bitwrite.h 39 MPP_RET mpp_writer_init(MppWriteCtx *ctx, void *p, RK_S32 size);
40 MPP_RET mpp_writer_reset(MppWriteCtx *ctx);
43 MPP_RET mpp_writer_status(MppWriteCtx *ctx);
45 void mpp_writer_flush(MppWriteCtx *ctx);
48 void mpp_writer_put_raw_bits(MppWriteCtx *ctx, RK_S32 val, RK_S32 len);
51 void mpp_writer_put_bits(MppWriteCtx *ctx, RK_S32 val, RK_S32 len);
54 void mpp_writer_align_zero(MppWriteCtx *ctx);
57 void mpp_writer_align_one(MppWriteCtx *ctx);
60 void mpp_writer_trailing(MppWriteCtx * ctx);
62 void mpp_writer_put_ue(MppWriteCtx *ctx, RK_U3
[all...]
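mpp_bitwrite.h declares the small bitstream writer used by the legacy encoder path: initialize it over a caller-provided buffer, push raw or exp-Golomb coded fields, then add trailing bits and flush. Using only the functions declared above, a usage sketch might look like this (buffer size and field values are arbitrary):

#include "mpp_bitwrite.h"

/* Write a few fields with the MppWriteCtx bit writer. */
void bitwriter_demo(void)
{
    RK_U8 buf[64];
    MppWriteCtx writer;

    mpp_writer_init(&writer, buf, sizeof(buf));  /* attach the writer to the buffer */

    mpp_writer_put_raw_bits(&writer, 0x1, 1);    /* one raw bit */
    mpp_writer_put_bits(&writer, 25, 6);         /* a 6-bit field */
    mpp_writer_put_ue(&writer, 3);               /* unsigned exp-Golomb value */

    mpp_writer_trailing(&writer);                /* trailing bits */
    mpp_writer_flush(&writer);                   /* flush the partial byte */
}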
/device/soc/hisilicon/hi3516dv300/sdk_linux/drv/interdrv/common/cipher/src/drv/cipher_v1.0/drivers/core/
drv_symc_v200.c 91 hi_void *ctx; /* params for isr callback function */ member
169 symc_hard_context *ctx = HI_NULL; in drv_symc_interrupt_isr() local
181 ctx = &g_hard_context[i]; in drv_symc_interrupt_isr()
182 if ((ctx->callback) && (ctx->callback(ctx->ctx) == HI_FALSE)) { in drv_symc_interrupt_isr()
188 ctx->done = HI_TRUE; in drv_symc_interrupt_isr()
190 crypto_queue_wait_up(&ctx->queue); in drv_symc_interrupt_isr()
315 static hi_s32 drv_symc_wait_irq(hi_u32 chn_num, symc_hard_context *ctx, hi_u3 argument
368 symc_hard_context *ctx = &g_hard_context[chn]; drv_symc_recover_entry() local
409 symc_hard_context *ctx = &g_hard_context[chn]; drv_symc_set_entry() local
440 symc_hard_context *ctx = &g_hard_context[chn]; drv_symc_set_pad_buffer() local
531 symc_hard_context *ctx = HI_NULL; drv_symc_print_last_node() local
1033 symc_hard_context *ctx = HI_NULL; drv_symc_set_iv() local
1100 symc_hard_context *ctx = HI_NULL; drv_symc_add_inbuf() local
1160 symc_hard_context *ctx = HI_NULL; drv_symc_add_outbuf() local
1196 symc_hard_context *ctx = HI_NULL; drv_symc_add_buf_usage() local
1221 symc_hard_context *ctx = HI_NULL; drv_aead_ccm_add_n() local
1286 symc_hard_context *ctx = HI_NULL; drv_aead_ccm_add_a() local
1351 symc_hard_context *ctx = HI_NULL; drv_aead_gcm_add_a() local
1404 symc_hard_context *ctx = HI_NULL; drv_aead_gcm_add_clen() local
1441 symc_hard_context *ctx = HI_NULL; drv_aead_get_tag() local
1521 symc_hard_context *ctx = HI_NULL; drv_symc_cfg() local
1557 drv_symc_set_isr_callback(hi_u32 chn_num, callback_symc_isr callback, hi_void *ctx) drv_symc_set_isr_callback() argument
1577 symc_hard_context *ctx = HI_NULL; drv_symc_start() local
1635 drv_symc_query_ram_interrupt(hi_u32 chn_num, symc_hard_context *ctx, hi_u32 timeout) drv_symc_query_ram_interrupt() argument
1671 symc_hard_context *ctx = HI_NULL; drv_symc_wait_done() local
[all...]

Completed in 12 milliseconds
