/kernel/linux/linux-6.6/drivers/gpu/drm/ |
H A D | drm_exec.c | 24 * struct drm_exec exec; 28 * drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT); 29 * drm_exec_until_all_locked(&exec) { 30 * ret = drm_exec_prepare_obj(&exec, boA, 1); 31 * drm_exec_retry_on_contention(&exec); 35 * ret = drm_exec_prepare_obj(&exec, boB, 1); 36 * drm_exec_retry_on_contention(&exec); 41 * drm_exec_for_each_locked_object(&exec, index, obj) { 45 * drm_exec_fini(&exec); 54 static void drm_exec_unlock_all(struct drm_exec *exec) in drm_exec_unlock_all() argument 75 drm_exec_init(struct drm_exec *exec, uint32_t flags) drm_exec_init() argument 95 drm_exec_fini(struct drm_exec *exec) drm_exec_fini() argument 114 drm_exec_cleanup(struct drm_exec *exec) drm_exec_cleanup() argument 134 drm_exec_obj_locked(struct drm_exec *exec, struct drm_gem_object *obj) drm_exec_obj_locked() argument 156 drm_exec_lock_contended(struct drm_exec *exec) drm_exec_lock_contended() argument 201 drm_exec_lock_obj(struct drm_exec *exec, struct drm_gem_object *obj) drm_exec_lock_obj() argument 254 drm_exec_unlock_obj(struct drm_exec *exec, struct drm_gem_object *obj) drm_exec_unlock_obj() argument 283 drm_exec_prepare_obj(struct drm_exec *exec, struct drm_gem_object *obj, unsigned int num_fences) drm_exec_prepare_obj() argument 315 drm_exec_prepare_array(struct drm_exec *exec, struct drm_gem_object **objects, unsigned int num_objects, unsigned int num_fences) drm_exec_prepare_array() argument [all...] |
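The doc-comment excerpted above already shows the intended calling sequence; the following is a minimal, self-contained sketch of that same pattern. boA and boB are hypothetical, already-initialized GEM objects, and one fence slot is reserved per object.

#include <drm/drm_exec.h>
#include <drm/drm_gem.h>

/* Minimal sketch of the locking loop from the drm_exec.c doc-comment.
 * boA and boB are hypothetical, already-initialized GEM objects.
 */
static int lock_two_bos(struct drm_gem_object *boA, struct drm_gem_object *boB)
{
    struct drm_exec exec;
    int ret;

    drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT);
    drm_exec_until_all_locked(&exec) {
        /* Lock boA and reserve one fence slot on its reservation object. */
        ret = drm_exec_prepare_obj(&exec, boA, 1);
        drm_exec_retry_on_contention(&exec);
        if (ret)
            goto error;

        ret = drm_exec_prepare_obj(&exec, boB, 1);
        drm_exec_retry_on_contention(&exec);
        if (ret)
            goto error;
    }

    /* ... all objects are now locked; queue the work here ... */

error:
    drm_exec_fini(&exec);   /* drops every lock and reference taken above */
    return ret;
}

On contention, drm_exec_retry_on_contention() restarts the drm_exec_until_all_locked() loop; the helper drops the locks taken so far and acquires the contended object first on the next pass, which is what keeps the loop deadlock-free.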
/foundation/multimedia/media_foundation/engine/include/foundation/ |
H A D | log.h | 118 #define NOK_RETURN(exec) \ 120 Status returnValue = (exec); \ 129 #define NOK_LOG(exec) \ 131 Status returnValue = (exec); \ 138 // If exec not return zero, then record the error code, especially when call system C function. 140 #define NZERO_LOG(exec) \ 142 int returnValue = (exec); \ 144 MEDIA_LOG_E("NZERO_LOG when call (" #exec "), return " PUBLIC_LOG_D32, returnValue); \ 150 #define NZERO_RETURN(exec) \ 152 int returnValue = (exec); \ [all...] |
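These helpers evaluate the wrapped expression once, keep the result in returnValue, and log and/or return early on failure: the NOK_* variants are for Status-returning calls, the NZERO_* variants for C-style calls that return non-zero on error. A hedged usage sketch follows; apart from the macros, every name is invented for illustration, the include path is assumed, and Status::OK is assumed to be the success value.

// Hypothetical usage sketch of the NOK_*/NZERO_* helpers quoted above.
#include <pthread.h>
#include "foundation/log.h"   // assumed include path for this header

Status AudioCapturePlugin::Prepare()
{
    NOK_RETURN(InitRingBuffer());                           // bail out when the Status is not OK
    NZERO_LOG(pthread_mutex_init(&bufferMutex_, nullptr));  // log a non-zero result from a C call
    NZERO_RETURN(StartCaptureThread());                     // return early when a C-style call fails
    return Status::OK;
}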
/kernel/linux/linux-6.6/drivers/gpu/drm/tests/ |
H A D | drm_exec_test.c | 47 struct drm_exec exec; in sanitycheck() local 49 drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT); in sanitycheck() 50 drm_exec_fini(&exec); in sanitycheck() 58 struct drm_exec exec; in test_lock() local 63 drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT); in test_lock() 64 drm_exec_until_all_locked(&exec) { in test_lock() 65 ret = drm_exec_lock_obj(&exec, &gobj); in test_lock() 66 drm_exec_retry_on_contention(&exec); in test_lock() 71 drm_exec_fini(&exec); in test_lock() 78 struct drm_exec exec; in test_lock_unlock() local 105 struct drm_exec exec; test_duplicates() local 132 struct drm_exec exec; test_prepare() local 156 struct drm_exec exec; test_prepare_array() local 175 struct drm_exec exec; test_multiple_loops() local [all...] |
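test_duplicates above exercises the DRM_EXEC_IGNORE_DUPLICATES flag; the sketch below shows what that flag buys a caller, namely that locking the same object twice is tolerated instead of failing with -EALREADY. gobj is a hypothetical, already-initialized GEM object.

#include <drm/drm_exec.h>
#include <drm/drm_gem.h>

/* Sketch only: with DRM_EXEC_IGNORE_DUPLICATES a duplicate lock of the same
 * GEM object is silently skipped instead of returning -EALREADY.
 */
static int lock_obj_twice(struct drm_gem_object *gobj)
{
    struct drm_exec exec;
    int ret = 0;

    drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT |
                         DRM_EXEC_IGNORE_DUPLICATES);
    drm_exec_until_all_locked(&exec) {
        ret = drm_exec_lock_obj(&exec, gobj);
        drm_exec_retry_on_contention(&exec);
        if (ret)
            break;

        /* Second lock of the same object: ignored thanks to the flag. */
        ret = drm_exec_lock_obj(&exec, gobj);
        drm_exec_retry_on_contention(&exec);
        if (ret)
            break;
    }
    drm_exec_fini(&exec);
    return ret;
}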
/foundation/multimedia/media_foundation/interface/inner_api/common/ |
H A D | log.h | 194 #define NOK_RETURN(exec) \ 196 Status returnValue = (exec); \ 205 #define NOK_LOG(exec) \ 207 Status returnValue = (exec); \ 214 // If exec not return zero, then record the error code, especially when call system C function. 216 #define NZERO_LOG(exec) \ 218 int returnValue = (exec); \ 220 MEDIA_LOG_E("NZERO_LOG when call (" #exec "), return " PUBLIC_LOG_D32, returnValue); \ 226 #define NZERO_RETURN(exec) \ 228 int returnValue = (exec); \ [all...] |
/kernel/linux/linux-6.6/drivers/gpu/drm/vc4/ |
H A D | vc4_gem.c | 157 struct vc4_exec_info *exec[2]; in vc4_save_hang_state() local 169 exec[0] = vc4_first_bin_job(vc4); in vc4_save_hang_state() 170 exec[1] = vc4_first_render_job(vc4); in vc4_save_hang_state() 171 if (!exec[0] && !exec[1]) { in vc4_save_hang_state() 179 if (!exec[i]) in vc4_save_hang_state() 183 list_for_each_entry(bo, &exec[i]->unref_list, unref_head) in vc4_save_hang_state() 185 state->bo_count += exec[i]->bo_count + unref_list_count; in vc4_save_hang_state() 198 if (!exec[i]) in vc4_save_hang_state() 201 for (j = 0; j < exec[ in vc4_save_hang_state() 475 struct vc4_exec_info *exec; vc4_submit_next_bin_job() local 520 struct vc4_exec_info *exec = vc4_first_render_job(vc4); vc4_submit_next_render_job() local 541 vc4_move_job_to_render(struct drm_device *dev, struct vc4_exec_info *exec) vc4_move_job_to_render() argument 555 vc4_update_bo_seqnos(struct vc4_exec_info *exec, uint64_t seqno) vc4_update_bo_seqnos() argument 582 vc4_unlock_bo_reservations(struct drm_device *dev, struct vc4_exec_info *exec, struct ww_acquire_ctx *acquire_ctx) vc4_unlock_bo_reservations() argument 602 vc4_lock_bo_reservations(struct drm_device *dev, struct vc4_exec_info *exec, struct ww_acquire_ctx *acquire_ctx) vc4_lock_bo_reservations() argument 681 vc4_queue_submit(struct drm_device *dev, struct vc4_exec_info *exec, struct ww_acquire_ctx *acquire_ctx, struct drm_syncobj *out_sync) vc4_queue_submit() argument 744 vc4_cl_lookup_bos(struct drm_device *dev, struct drm_file *file_priv, struct vc4_exec_info *exec) vc4_cl_lookup_bos() argument 799 vc4_get_bcl(struct drm_device *dev, struct vc4_exec_info *exec) vc4_get_bcl() argument 919 vc4_complete_exec(struct drm_device *dev, struct vc4_exec_info *exec) vc4_complete_exec() argument 978 struct vc4_exec_info *exec = vc4_job_handle_completed() local 1125 struct vc4_exec_info *exec; vc4_submit_cl_ioctl() local [all...] |
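vc4_lock_bo_reservations()/vc4_unlock_bo_reservations() highlighted above perform the classic ww-mutex reservation dance over the job's BO array before submission. The following is a condensed sketch of that generic pattern (essentially what drm_gem_lock_reservations() does), not the literal vc4 code: lock every BO's dma_resv under one ww_acquire_ctx and, when -EDEADLK signals contention, drop everything and retry starting with the contended object.

#include <linux/dma-resv.h>
#include <linux/ww_mutex.h>
#include <drm/drm_gem.h>

/* Condensed sketch of the reservation-locking pattern used before job
 * submission; illustrative only, not the vc4 implementation.
 */
static int lock_all_bos(struct drm_gem_object **bos, int count,
                        struct ww_acquire_ctx *ctx)
{
    int contended = -1;
    int i, ret;

    ww_acquire_init(ctx, &reservation_ww_class);
retry:
    if (contended != -1) {
        /* Sleep on the object we lost to last time before retrying. */
        dma_resv_lock_slow(bos[contended]->resv, ctx);
    }

    for (i = 0; i < count; i++) {
        if (i == contended)
            continue;

        ret = dma_resv_lock(bos[i]->resv, ctx);
        if (ret) {
            int j;

            /* Back off: drop everything taken so far. */
            for (j = 0; j < i; j++)
                dma_resv_unlock(bos[j]->resv);
            if (contended >= i)
                dma_resv_unlock(bos[contended]->resv);

            if (ret == -EDEADLK) {
                contended = i;
                goto retry;
            }
            ww_acquire_fini(ctx);
            return ret;
        }
    }

    ww_acquire_done(ctx);
    return 0;
}

The drm_exec entries elsewhere in this listing exist precisely to replace open-coded versions of this loop.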
H A D | vc4_validate.c | 51 struct vc4_exec_info *exec, \ 106 vc4_use_bo(struct vc4_exec_info *exec, uint32_t hindex) in vc4_use_bo() argument 108 struct vc4_dev *vc4 = exec->dev; in vc4_use_bo() 115 if (hindex >= exec->bo_count) { in vc4_use_bo() 117 hindex, exec->bo_count); in vc4_use_bo() 120 obj = to_drm_gem_dma_obj(exec->bo[hindex]); in vc4_use_bo() 133 vc4_use_handle(struct vc4_exec_info *exec, uint32_t gem_handles_packet_index) in vc4_use_handle() argument 135 return vc4_use_bo(exec, exec->bo_index[gem_handles_packet_index]); in vc4_use_handle() 139 validate_bin_pos(struct vc4_exec_info *exec, voi argument 163 vc4_check_tex_size(struct vc4_exec_info *exec, struct drm_gem_dma_object *fbo, uint32_t offset, uint8_t tiling_format, uint32_t width, uint32_t height, uint8_t cpp) vc4_check_tex_size() argument 488 vc4_validate_bin_cl(struct drm_device *dev, void *validated, void *unvalidated, struct vc4_exec_info *exec) vc4_validate_bin_cl() argument 573 reloc_tex(struct vc4_exec_info *exec, void *uniform_data_u, struct vc4_texture_sample_info *sample, uint32_t texture_handle_index, bool is_cs) reloc_tex() argument 756 validate_gl_shader_rec(struct drm_device *dev, struct vc4_exec_info *exec, struct vc4_shader_state *state) validate_gl_shader_rec() argument 938 vc4_validate_shader_recs(struct drm_device *dev, struct vc4_exec_info *exec) vc4_validate_shader_recs() argument [all...] |
H A D | vc4_irq.c | 67 struct vc4_exec_info *exec; in vc4_overflow_mem_work() local 92 exec = vc4_first_bin_job(vc4); in vc4_overflow_mem_work() 93 if (!exec) in vc4_overflow_mem_work() 94 exec = vc4_last_render_job(vc4); in vc4_overflow_mem_work() 95 if (exec) { in vc4_overflow_mem_work() 96 exec->bin_slots |= vc4->bin_alloc_overflow; in vc4_overflow_mem_work() 120 struct vc4_exec_info *next, *exec = vc4_first_bin_job(vc4); in vc4_irq_finish_bin_job() local 122 if (!exec) in vc4_irq_finish_bin_job() 125 trace_vc4_bcl_end_irq(dev, exec->seqno); in vc4_irq_finish_bin_job() 127 vc4_move_job_to_render(dev, exec); in vc4_irq_finish_bin_job() 142 struct vc4_exec_info *exec = vc4_first_bin_job(vc4); vc4_cancel_bin_job() local 159 struct vc4_exec_info *exec = vc4_first_render_job(vc4); vc4_irq_finish_render_job() local [all...] |
H A D | vc4_render_cl.c | 99 static uint32_t vc4_full_res_offset(struct vc4_exec_info *exec, in vc4_full_res_offset() argument 105 (DIV_ROUND_UP(exec->args->width, 32) * y + x); in vc4_full_res_offset() 123 static void emit_tile(struct vc4_exec_info *exec, in emit_tile() argument 127 struct drm_vc4_submit_cl *args = exec->args; in emit_tile() 139 vc4_full_res_offset(exec, setup->color_read, in emit_tile() 161 vc4_full_res_offset(exec, setup->zs_read, in emit_tile() 185 rcl_u32(setup, (exec->tile_alloc_offset + in emit_tile() 186 (y * exec->bin_tiles_x + x) * 32)); in emit_tile() 201 vc4_full_res_offset(exec, setup->msaa_color_write, in emit_tile() 219 vc4_full_res_offset(exec, setu in emit_tile() 253 vc4_create_rcl_bo(struct drm_device *dev, struct vc4_exec_info *exec, struct vc4_rcl_setup *setup) vc4_create_rcl_bo() argument 383 vc4_full_res_bounds_check(struct vc4_exec_info *exec, struct drm_gem_dma_object *obj, struct drm_vc4_submit_rcl_surface *surf) vc4_full_res_bounds_check() argument 409 vc4_rcl_msaa_surface_setup(struct vc4_exec_info *exec, struct drm_gem_dma_object **obj, struct drm_vc4_submit_rcl_surface *surf) vc4_rcl_msaa_surface_setup() argument 435 vc4_rcl_surface_setup(struct vc4_exec_info *exec, struct drm_gem_dma_object **obj, struct drm_vc4_submit_rcl_surface *surf, bool is_write) vc4_rcl_surface_setup() argument 534 vc4_rcl_render_config_surface_setup(struct vc4_exec_info *exec, struct vc4_rcl_setup *setup, struct drm_gem_dma_object **obj, struct drm_vc4_submit_rcl_surface *surf) vc4_rcl_render_config_surface_setup() argument 594 vc4_get_rcl(struct drm_device *dev, struct vc4_exec_info *exec) vc4_get_rcl() argument [all...] |
/kernel/linux/linux-5.10/drivers/gpu/drm/vc4/ |
H A D | vc4_gem.c | 154 struct vc4_exec_info *exec[2]; in vc4_save_hang_state() local 166 exec[0] = vc4_first_bin_job(vc4); in vc4_save_hang_state() 167 exec[1] = vc4_first_render_job(vc4); in vc4_save_hang_state() 168 if (!exec[0] && !exec[1]) { in vc4_save_hang_state() 176 if (!exec[i]) in vc4_save_hang_state() 180 list_for_each_entry(bo, &exec[i]->unref_list, unref_head) in vc4_save_hang_state() 182 state->bo_count += exec[i]->bo_count + unref_list_count; in vc4_save_hang_state() 195 if (!exec[i]) in vc4_save_hang_state() 198 for (j = 0; j < exec[ in vc4_save_hang_state() 469 struct vc4_exec_info *exec; vc4_submit_next_bin_job() local 509 struct vc4_exec_info *exec = vc4_first_render_job(vc4); vc4_submit_next_render_job() local 526 vc4_move_job_to_render(struct drm_device *dev, struct vc4_exec_info *exec) vc4_move_job_to_render() argument 537 vc4_update_bo_seqnos(struct vc4_exec_info *exec, uint64_t seqno) vc4_update_bo_seqnos() argument 562 vc4_unlock_bo_reservations(struct drm_device *dev, struct vc4_exec_info *exec, struct ww_acquire_ctx *acquire_ctx) vc4_unlock_bo_reservations() argument 585 vc4_lock_bo_reservations(struct drm_device *dev, struct vc4_exec_info *exec, struct ww_acquire_ctx *acquire_ctx) vc4_lock_bo_reservations() argument 664 vc4_queue_submit(struct drm_device *dev, struct vc4_exec_info *exec, struct ww_acquire_ctx *acquire_ctx, struct drm_syncobj *out_sync) vc4_queue_submit() argument 727 vc4_cl_lookup_bos(struct drm_device *dev, struct drm_file *file_priv, struct vc4_exec_info *exec) vc4_cl_lookup_bos() argument 821 vc4_get_bcl(struct drm_device *dev, struct vc4_exec_info *exec) vc4_get_bcl() argument 941 vc4_complete_exec(struct drm_device *dev, struct vc4_exec_info *exec) vc4_complete_exec() argument 997 struct vc4_exec_info *exec = vc4_job_handle_completed() local 1134 struct vc4_exec_info *exec; vc4_submit_cl_ioctl() local [all...] |
H A D | vc4_validate.c | 51 struct vc4_exec_info *exec, \ 106 vc4_use_bo(struct vc4_exec_info *exec, uint32_t hindex) in vc4_use_bo() argument 111 if (hindex >= exec->bo_count) { in vc4_use_bo() 113 hindex, exec->bo_count); in vc4_use_bo() 116 obj = exec->bo[hindex]; in vc4_use_bo() 129 vc4_use_handle(struct vc4_exec_info *exec, uint32_t gem_handles_packet_index) in vc4_use_handle() argument 131 return vc4_use_bo(exec, exec->bo_index[gem_handles_packet_index]); in vc4_use_handle() 135 validate_bin_pos(struct vc4_exec_info *exec, void *untrusted, uint32_t pos) in validate_bin_pos() argument 140 return (untrusted - 1 == exec in validate_bin_pos() 159 vc4_check_tex_size(struct vc4_exec_info *exec, struct drm_gem_cma_object *fbo, uint32_t offset, uint8_t tiling_format, uint32_t width, uint32_t height, uint8_t cpp) vc4_check_tex_size() argument 480 vc4_validate_bin_cl(struct drm_device *dev, void *validated, void *unvalidated, struct vc4_exec_info *exec) vc4_validate_bin_cl() argument 561 reloc_tex(struct vc4_exec_info *exec, void *uniform_data_u, struct vc4_texture_sample_info *sample, uint32_t texture_handle_index, bool is_cs) reloc_tex() argument 744 validate_gl_shader_rec(struct drm_device *dev, struct vc4_exec_info *exec, struct vc4_shader_state *state) validate_gl_shader_rec() argument 926 vc4_validate_shader_recs(struct drm_device *dev, struct vc4_exec_info *exec) vc4_validate_shader_recs() argument [all...] |
H A D | vc4_render_cl.c | 99 static uint32_t vc4_full_res_offset(struct vc4_exec_info *exec, in vc4_full_res_offset() argument 105 (DIV_ROUND_UP(exec->args->width, 32) * y + x); in vc4_full_res_offset() 123 static void emit_tile(struct vc4_exec_info *exec, in emit_tile() argument 127 struct drm_vc4_submit_cl *args = exec->args; in emit_tile() 139 vc4_full_res_offset(exec, setup->color_read, in emit_tile() 161 vc4_full_res_offset(exec, setup->zs_read, in emit_tile() 185 rcl_u32(setup, (exec->tile_alloc_offset + in emit_tile() 186 (y * exec->bin_tiles_x + x) * 32)); in emit_tile() 201 vc4_full_res_offset(exec, setup->msaa_color_write, in emit_tile() 219 vc4_full_res_offset(exec, setu in emit_tile() 253 vc4_create_rcl_bo(struct drm_device *dev, struct vc4_exec_info *exec, struct vc4_rcl_setup *setup) vc4_create_rcl_bo() argument 383 vc4_full_res_bounds_check(struct vc4_exec_info *exec, struct drm_gem_cma_object *obj, struct drm_vc4_submit_rcl_surface *surf) vc4_full_res_bounds_check() argument 409 vc4_rcl_msaa_surface_setup(struct vc4_exec_info *exec, struct drm_gem_cma_object **obj, struct drm_vc4_submit_rcl_surface *surf) vc4_rcl_msaa_surface_setup() argument 435 vc4_rcl_surface_setup(struct vc4_exec_info *exec, struct drm_gem_cma_object **obj, struct drm_vc4_submit_rcl_surface *surf, bool is_write) vc4_rcl_surface_setup() argument 534 vc4_rcl_render_config_surface_setup(struct vc4_exec_info *exec, struct vc4_rcl_setup *setup, struct drm_gem_cma_object **obj, struct drm_vc4_submit_rcl_surface *surf) vc4_rcl_render_config_surface_setup() argument 594 vc4_get_rcl(struct drm_device *dev, struct vc4_exec_info *exec) vc4_get_rcl() argument [all...] |
H A D | vc4_irq.c | 64 struct vc4_exec_info *exec; in vc4_overflow_mem_work() local 89 exec = vc4_first_bin_job(vc4); in vc4_overflow_mem_work() 90 if (!exec) in vc4_overflow_mem_work() 91 exec = vc4_last_render_job(vc4); in vc4_overflow_mem_work() 92 if (exec) { in vc4_overflow_mem_work() 93 exec->bin_slots |= vc4->bin_alloc_overflow; in vc4_overflow_mem_work() 117 struct vc4_exec_info *next, *exec = vc4_first_bin_job(vc4); in vc4_irq_finish_bin_job() local 119 if (!exec) in vc4_irq_finish_bin_job() 122 vc4_move_job_to_render(dev, exec); in vc4_irq_finish_bin_job() 129 if (next && next->perfmon == exec in vc4_irq_finish_bin_job() 137 struct vc4_exec_info *exec = vc4_first_bin_job(vc4); vc4_cancel_bin_job() local 154 struct vc4_exec_info *exec = vc4_first_render_job(vc4); vc4_irq_finish_render_job() local [all...] |
/kernel/linux/linux-6.6/include/drm/ |
H A D | drm_exec.h | 56 * @exec: Pointer to the drm_exec context 63 drm_exec_obj(struct drm_exec *exec, unsigned long index) in drm_exec_obj() argument 65 return index < exec->num_objects ? exec->objects[index] : NULL; in drm_exec_obj() 70 * @exec: drm_exec object 76 #define drm_exec_for_each_locked_object(exec, index, obj) \ 77 for ((index) = 0; ((obj) = drm_exec_obj(exec, index)); ++(index)) 82 * @exec: drm_exec object 90 #define drm_exec_for_each_locked_object_reverse(exec, index, obj) \ 91 for ((index) = (exec) 133 drm_exec_is_contended(struct drm_exec *exec) drm_exec_is_contended() argument [all...] |
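drm_exec_for_each_locked_object() is typically used after the locking loop has succeeded, for example to attach the job's fence to every locked reservation. A hedged sketch, assuming one fence slot per object was reserved earlier via drm_exec_prepare_obj(..., 1); the fence itself is hypothetical.

#include <linux/dma-resv.h>
#include <drm/drm_exec.h>
#include <drm/drm_gem.h>

/* Sketch: walk every object locked by a successful drm_exec_until_all_locked()
 * pass and install the job fence. Assumes the fence slots were reserved via
 * drm_exec_prepare_obj(exec, obj, 1) beforehand.
 */
static void attach_job_fence(struct drm_exec *exec, struct dma_fence *fence)
{
    struct drm_gem_object *obj;
    unsigned long index;

    drm_exec_for_each_locked_object(exec, index, obj)
        dma_resv_add_fence(obj->resv, fence, DMA_RESV_USAGE_WRITE);
}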
/kernel/linux/linux-5.10/include/uapi/linux/ |
H A D | a.out.h | 44 #define N_MAGIC(exec) ((exec).a_info & 0xffff) 46 #define N_MACHTYPE(exec) ((enum machine_type)(((exec).a_info >> 16) & 0xff)) 47 #define N_FLAGS(exec) (((exec).a_info >> 24) & 0xff) 48 #define N_SET_INFO(exec, magic, type, flags) \ 49 ((exec).a_info = ((magic) & 0xffff) \ 52 #define N_SET_MAGIC(exec, magic) \ 53 ((exec) [all...] |
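These accessors just shift and mask the a_info word of an a.out header: the magic number sits in the low 16 bits, the machine type in the next 8, the flags in the top 8. A small userspace sketch follows; struct exec normally comes from <asm/a.out.h>, which not every architecture still ships, so a minimal stand-in with only the a_info member is defined here for illustration (the header allows this via __STRUCT_EXEC_OVERRIDE__).

/* Illustrative only: a_info packs magic (low 16 bits), machine type (next 8)
 * and flags (top 8); the macros below just shift and mask that word.
 */
#define __STRUCT_EXEC_OVERRIDE__
struct exec { unsigned int a_info; };   /* stand-in for the asm/a.out.h struct */

#include <linux/a.out.h>
#include <stdio.h>

int main(void)
{
    struct exec hdr = { 0 };

    N_SET_INFO(hdr, ZMAGIC, M_386, 0);  /* demand-paged i386 executable */
    printf("magic=%#o machtype=%d flags=%#x\n",
           N_MAGIC(hdr), (int)N_MACHTYPE(hdr), N_FLAGS(hdr));
    return 0;
}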
/kernel/linux/linux-6.6/include/uapi/linux/ |
H A D | a.out.h | 44 #define N_MAGIC(exec) ((exec).a_info & 0xffff) 46 #define N_MACHTYPE(exec) ((enum machine_type)(((exec).a_info >> 16) & 0xff)) 47 #define N_FLAGS(exec) (((exec).a_info >> 24) & 0xff) 48 #define N_SET_INFO(exec, magic, type, flags) \ 49 ((exec).a_info = ((magic) & 0xffff) \ 52 #define N_SET_MAGIC(exec, magic) \ 53 ((exec) [all...] |
/kernel/linux/patches/linux-6.6/prebuilts/usr/include/linux/ |
H A D | a.out.h | 52 #define N_MAGIC(exec) ((exec).a_info & 0xffff) 54 #define N_MACHTYPE(exec) ((enum machine_type) (((exec).a_info >> 16) & 0xff)) 55 #define N_FLAGS(exec) (((exec).a_info >> 24) & 0xff) 56 #define N_SET_INFO(exec,magic,type,flags) ((exec).a_info = ((magic) & 0xffff) | (((int) (type) & 0xff) << 16) | (((flags) & 0xff) << 24)) 57 #define N_SET_MAGIC(exec,magic) ((exec) [all...] |
/kernel/linux/patches/linux-5.10/prebuilts/usr/include/linux/ |
H A D | a.out.h | 52 #define N_MAGIC(exec) ((exec).a_info & 0xffff) 54 #define N_MACHTYPE(exec) ((enum machine_type) (((exec).a_info >> 16) & 0xff)) 55 #define N_FLAGS(exec) (((exec).a_info >> 24) & 0xff) 56 #define N_SET_INFO(exec,magic,type,flags) ((exec).a_info = ((magic) & 0xffff) | (((int) (type) & 0xff) << 16) | (((flags) & 0xff) << 24)) 57 #define N_SET_MAGIC(exec,magic) ((exec) [all...] |
/kernel/linux/patches/linux-4.19/prebuilts/usr/include/linux/ |
H A D | a.out.h | 40 #define N_MAGIC(exec) ((exec).a_info & 0xffff) 42 #define N_MACHTYPE(exec) ((enum machine_type)(((exec).a_info >> 16) & 0xff)) 43 #define N_FLAGS(exec) (((exec).a_info >> 24) & 0xff) 44 #define N_SET_INFO(exec, magic, type, flags) \ 45 ((exec).a_info = ((magic) & 0xffff) \ 48 #define N_SET_MAGIC(exec, magic) \ 49 ((exec) [all...] |
/foundation/multimedia/media_foundation/engine/include/pipeline/core/ |
H A D | error_code.h | 52 #define FAIL_RETURN(exec) \ 54 ErrorCode returnValue = (exec); \ 63 #define FAIL_RETURN_MSG_IMPL(loglevel, exec, fmt, args...) \ 65 ErrorCode returnValue = (exec); \ 74 #define FAIL_RETURN_MSG(exec, fmt, args...) FAIL_RETURN_MSG_IMPL(MEDIA_LOG_E, exec, fmt, ##args) 78 #define FAIL_RETURN_MSG_W(exec, fmt, args...) FAIL_RETURN_MSG_IMPL(MEDIA_LOG_W, exec, fmt, ##args) 82 #define FAIL_LOG(exec) \ 84 ErrorCode returnValue = (exec); \ [all...] |
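FAIL_RETURN and its *_MSG variants evaluate an ErrorCode-returning expression once and return that code (optionally logging at the chosen level) when it is not the success value, while FAIL_LOG only logs and carries on. A hedged usage sketch; apart from the macros, the filter and method names are invented and ErrorCode::SUCCESS is assumed to be the success enumerator.

// Hypothetical usage of the FAIL_* helpers quoted above.
ErrorCode DemuxerFilter::Prepare()
{
    FAIL_RETURN(plugin_->Init());                                      // propagate the ErrorCode on failure
    FAIL_RETURN_MSG(plugin_->Start(), "start demuxer plugin failed");  // same, plus an error log
    FAIL_LOG(ReportMediaInfo());                                       // log a failure but keep going
    return ErrorCode::SUCCESS;
}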
/foundation/arkui/ace_engine/frameworks/bridge/declarative_frontend/engine/ |
H A D | js_execution_scope_defines.h | 20 #define JAVASCRIPT_EXECUTION_SCOPE(exec) \ 21 panda::LocalScope socpe(exec.vm_); 28 #define CHECK_JAVASCRIPT_SCOPE(exec, ...) \ 29 if (JsiDeclarativeEngineInstance::GetCurrentRuntime() == nullptr || exec.vm_ == nullptr) \ 31 #define JAVASCRIPT_EXECUTION_SCOPE_WITH_CHECK(exec, ...) \ 32 CHECK_JAVASCRIPT_SCOPE(exec, __VA_ARGS__) \ 33 JAVASCRIPT_EXECUTION_SCOPE(exec) 35 #define JAVASCRIPT_EXECUTION_SCOPE(exec) 38 #define JAVASCRIPT_EXECUTION_SCOPE_WITH_CHECK(exec, ...) |
/foundation/distributedhardware/distributed_hardware_fwk/av_transport/common/include/ |
H A D | av_trans_log.h | 46 #define TRUE_RETURN(exec, fmt, args...) \
48 bool retCode = (exec); \
57 #define TRUE_RETURN_V(exec, ret) \
59 bool retCode = (exec); \
67 #define TRUE_RETURN_V_MSG_E(exec, ret, fmt, args...) \
69 bool retCode = (exec); \
78 #define TRUE_RETURN_V_MSG_D(exec, ret, fmt, args...) \
80 bool retCode = (exec); \
89 #define TRUE_LOG_MSG(exec, fmt, args...) \
91 bool retCode = (exec); \
[all...] |
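The TRUE_* helpers evaluate a boolean condition and, when it is true, log and/or return early (the *_V variants with a caller-supplied value). A hedged usage sketch; apart from the macros, every name below is invented for illustration.

// Hypothetical usage sketch of the TRUE_* helpers quoted above.
#include <string>

int32_t SetChannelName(const std::string &name, bool engineRunning)
{
    TRUE_RETURN_V_MSG_E(name.empty(), -1, "channel name is empty");   // error log + return -1
    TRUE_RETURN_V(!engineRunning, -1);                                // silent early return
    TRUE_LOG_MSG(name.size() > 64, "channel name is unusually long"); // log only, keep going
    // ... apply the channel name ...
    return 0;
}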
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/ |
H A D | amdgpu_csa.c | 70 struct drm_exec exec; in amdgpu_map_static_csa() local 73 drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT); in amdgpu_map_static_csa() 74 drm_exec_until_all_locked(&exec) { in amdgpu_map_static_csa() 75 r = amdgpu_vm_lock_pd(vm, &exec, 0); in amdgpu_map_static_csa() 77 r = drm_exec_lock_obj(&exec, &bo->tbo.base); in amdgpu_map_static_csa() 78 drm_exec_retry_on_contention(&exec); in amdgpu_map_static_csa() 102 drm_exec_fini(&exec); in amdgpu_map_static_csa() 110 struct drm_exec exec; in amdgpu_unmap_static_csa() local 113 drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT); in amdgpu_unmap_static_csa() 114 drm_exec_until_all_locked(&exec) { in amdgpu_unmap_static_csa() [all...] |
/foundation/multimedia/image_effect/interfaces/inner_api/native/common/ |
H A D | error_code.h | 125 #define FALSE_RETURN_MSG_W(exec, ret, fmt, args...) \ 127 bool returnValue = (exec); \ 136 #define FALSE_RETURN_E(exec, ret) \ 138 bool returnValue = (exec); \ 140 EFFECT_LOGE("FALSE_RETURN " #exec); \ 147 #define FALSE_RETURN_MSG_E(exec, ret, fmt, args...) \ 149 bool returnValue = (exec); \ 158 #define FAIL_RETURN(exec) \ 160 ErrorCode returnValue = (exec); \ |
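The FALSE_* helpers are the mirror image: they bail out when the condition is false, and FALSE_RETURN_E logs the failing expression itself through #exec stringification, as the snippet shows; FAIL_RETURN checks an ErrorCode. A short hedged sketch; the method names and the specific ErrorCode enumerators are assumptions.

// Hypothetical usage of FALSE_RETURN_E / FAIL_RETURN; all names other than the
// macros are invented, including the ErrorCode enumerators.
ErrorCode CropFilter::Render(EffectBuffer *src, EffectBuffer *dst)
{
    FALSE_RETURN_E(src != nullptr && dst != nullptr, ErrorCode::ERR_INPUT_NULL);
    FAIL_RETURN(DoCrop(src, dst));   // propagate a non-success ErrorCode
    return ErrorCode::SUCCESS;
}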
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/ |
H A D | nouveau_exec.c | 67 * DRM_NOUVEAU_EXEC ioctl is called to submit an exec job. 94 struct drm_exec *exec = &job->exec; in nouveau_exec_job_submit() local 105 drm_exec_init(exec, DRM_EXEC_INTERRUPTIBLE_WAIT | in nouveau_exec_job_submit() 107 drm_exec_until_all_locked(exec) { in nouveau_exec_job_submit() 114 ret = drm_exec_prepare_obj(exec, va->gem.obj, 1); in nouveau_exec_job_submit() 115 drm_exec_retry_on_contention(exec); in nouveau_exec_job_submit() 122 drm_exec_for_each_locked_object(exec, index, obj) { in nouveau_exec_job_submit() 135 drm_exec_fini(exec); in nouveau_exec_job_submit() 143 struct drm_exec *exec in nouveau_exec_job_armed_submit() local [all...] |
/kernel/linux/linux-5.10/tools/perf/util/ |
H A D | comm.c | 108 struct comm *comm__new(const char *str, u64 timestamp, bool exec) in comm__new() argument 116 comm->exec = exec; in comm__new() 127 int comm__override(struct comm *comm, const char *str, u64 timestamp, bool exec) in comm__override() argument 138 if (exec) in comm__override() 139 comm->exec = true; in comm__override() |
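comm__new()/comm__override() store a thread's command string together with a timestamp and an exec flag, the flag being set when the rename came from an exec(2) and therefore starts a fresh comm rather than retitling the old one. A hedged sketch of the call shape only; inside perf these calls are driven from the thread/COMM-event code rather than invoked directly like this.

/* Call-shape sketch only; in perf the 'exec' argument is derived from
 * PERF_RECORD_MISC_COMM_EXEC on incoming PERF_RECORD_COMM events.
 */
#include "comm.h"

static void record_comm_example(u64 timestamp)
{
    struct comm *comm;

    /* The thread first shows up as "bash", not via exec. */
    comm = comm__new("bash", timestamp, false);
    if (comm == NULL)
        return;

    /* Later the same thread execs "make": override and mark it as exec. */
    comm__override(comm, "make", timestamp + 1000, true);

    comm__free(comm);
}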