Lines Matching refs:job
35 v3dX(job_emit_binning_flush)(struct v3dv_job *job)
37 assert(job);
39 v3dv_cl_ensure_space_with_branch(&job->bcl, cl_packet_length(FLUSH));
40 v3dv_return_if_oom(NULL, job);
42 cl_emit(&job->bcl, FLUSH, flush);
46 v3dX(job_emit_binning_prolog)(struct v3dv_job *job,
53 cl_emit(&job->bcl, NUMBER_OF_LAYERS, config) {
58 cl_emit(&job->bcl, TILE_BINNING_MODE_CFG, config) {
68 cl_emit(&job->bcl, FLUSH_VCD_CACHE, bin);
73 cl_emit(&job->bcl, START_TILE_BINNING, bin);
79 assert(cmd_buffer->state.job);
80 v3dv_cl_ensure_space_with_branch(&cmd_buffer->state.job->bcl,
83 cl_emit(&cmd_buffer->state.job->bcl, RETURN_FROM_SUB_LIST, ret);
87 v3dX(job_emit_clip_window)(struct v3dv_job *job, const VkRect2D *rect)
89 assert(job);
91 v3dv_cl_ensure_space_with_branch(&job->bcl, cl_packet_length(CLIP_WINDOW));
92 v3dv_return_if_oom(NULL, job);
94 cl_emit(&job->bcl, CLIP_WINDOW, clip) {
169 if (state->job->first_subpass > first_subpass_idx)
172 /* If the job is continuing a subpass started in another job, we always
175 if (state->job->is_subpass_continue)
228 * if the current job is continuing subpass work started by a previous
229 * job, for the same reason.
377 /* If this is resuming a subpass started with another job, then attachment
380 if (state->job->is_subpass_continue)
389 /* If this job is running in a subpass other than the first subpass in
393 if (state->job->first_subpass != first_subpass_idx)
419 /* Attachment store operations only apply on the last job we emit on the
423 if (!state->job->is_subpass_finish)
452 assert(state->job->first_subpass >= ds_attachment->first_subpass);
546 * If we have configured the job to do early Z/S clearing, then we
552 use_global_zs_clear = !state->job->early_zs_clear &&
573 assert(state->job->first_subpass >= attachment->first_subpass);
608 * doing the resolve in a shader separately after this job, so we will
662 struct v3dv_job *job = cmd_buffer->state.job;
663 assert(job);
668 struct v3dv_cl *cl = &job->indirect;
698 cl_emit(&job->rcl, START_ADDRESS_OF_GENERIC_TILE_LIST, branch) {
710 struct v3dv_job *job = cmd_buffer->state.job;
711 struct v3dv_cl *rcl = &job->rcl;
716 const struct v3dv_frame_tiling *tiling = &job->frame_tiling;
720 list.address = v3dv_cl_address(job->tile_alloc, tile_alloc_offset);
754 set_rcl_early_z_config(struct v3dv_job *job,
758 /* Disable if none of the draw calls in this job enabled EZ */
759 if (!job->has_ez_draws) {
764 switch (job->first_ez_state) {
783 struct v3dv_job *job = cmd_buffer->state.job;
784 assert(job);
799 const struct v3dv_frame_tiling *tiling = &job->frame_tiling;
801 const uint32_t fb_layers = job->frame_tiling.layers;
803 v3dv_cl_ensure_space_with_branch(&job->rcl, 200 +
811 struct v3dv_cl *rcl = &job->rcl;
833 set_rcl_early_z_config(job,
837 /* Early-Z/S clear can be enabled if the job is clearing and not
894 * in the job so we can skip these later.
896 job->early_zs_clear = do_early_zs_clear;
1027 clear.clear_z_stencil_buffer = !job->early_zs_clear;
1054 struct v3dv_job *job = cmd_buffer->state.job;
1055 assert(job);
1062 v3dv_cl_ensure_space_with_branch(&job->bcl, required_cl_size);
1065 cl_emit(&job->bcl, CLIPPER_XY_SCALING, clip) {
1070 cl_emit(&job->bcl, CLIPPER_Z_SCALE_AND_OFFSET, clip) {
1074 cl_emit(&job->bcl, CLIPPER_Z_MIN_MAX_CLIPPING_PLANES, clip) {
1082 cl_emit(&job->bcl, VIEWPORT_OFFSET, vp) {
1093 struct v3dv_job *job = cmd_buffer->state.job;
1094 assert(job);
1103 v3dv_cl_ensure_space_with_branch(&job->bcl,
1111 cl_emit_with_prepacked(&job->bcl, STENCIL_CFG,
1130 cl_emit_prepacked(&job->bcl, &pipeline->stencil_cfg[i]);
1155 struct v3dv_job *job = cmd_buffer->state.job;
1156 assert(job);
1158 v3dv_cl_ensure_space_with_branch(&job->bcl, cl_packet_length(DEPTH_OFFSET));
1162 cl_emit(&job->bcl, DEPTH_OFFSET, bias) {
1176 struct v3dv_job *job = cmd_buffer->state.job;
1177 assert(job);
1179 v3dv_cl_ensure_space_with_branch(&job->bcl, cl_packet_length(LINE_WIDTH));
1182 cl_emit(&job->bcl, LINE_WIDTH, line) {
1195 struct v3dv_job *job = cmd_buffer->state.job;
1196 assert(job);
1198 v3dv_cl_ensure_space_with_branch(&job->bcl, cl_packet_length(SAMPLE_STATE));
1201 cl_emit(&job->bcl, SAMPLE_STATE, state) {
1210 struct v3dv_job *job = cmd_buffer->state.job;
1211 assert(job);
1221 v3dv_cl_ensure_space_with_branch(&job->bcl, blend_packets_size);
1226 cl_emit(&job->bcl, BLEND_ENABLES, enables) {
1233 cl_emit_prepacked(&job->bcl, &pipeline->blend.cfg[i]);
1240 cl_emit(&job->bcl, BLEND_CONSTANT_COLOR, color) {
1253 struct v3dv_job *job = cmd_buffer->state.job;
1254 v3dv_cl_ensure_space_with_branch(&job->bcl, cl_packet_length(COLOR_WRITE_MASKS));
1258 cl_emit(&job->bcl, COLOR_WRITE_MASKS, mask) {
1267 emit_flat_shade_flags(struct v3dv_job *job,
1273 v3dv_cl_ensure_space_with_branch(&job->bcl,
1275 v3dv_return_if_oom(NULL, job);
1277 cl_emit(&job->bcl, FLAT_SHADE_FLAGS, flags) {
1286 emit_noperspective_flags(struct v3dv_job *job,
1292 v3dv_cl_ensure_space_with_branch(&job->bcl,
1294 v3dv_return_if_oom(NULL, job);
1296 cl_emit(&job->bcl, NON_PERSPECTIVE_FLAGS, flags) {
1305 emit_centroid_flags(struct v3dv_job *job,
1311 v3dv_cl_ensure_space_with_branch(&job->bcl,
1313 v3dv_return_if_oom(NULL, job);
1315 cl_emit(&job->bcl, CENTROID_FLAGS, flags) {
1324 emit_varying_flags(struct v3dv_job *job,
1327 void (*flag_emit_callback)(struct v3dv_job *job,
1339 flag_emit_callback(job, i, flags[i],
1343 flag_emit_callback(job, i, flags[i],
1347 flag_emit_callback(job, i, flags[i],
1361 struct v3dv_job *job = cmd_buffer->state.job;
1373 if (!emit_varying_flags(job, num_flags, flat_shade_flags,
1376 &job->bcl, cl_packet_length(ZERO_ALL_FLAT_SHADE_FLAGS));
1379 cl_emit(&job->bcl, ZERO_ALL_FLAT_SHADE_FLAGS, flags);
1382 if (!emit_varying_flags(job, num_flags, noperspective_flags,
1385 &job->bcl, cl_packet_length(ZERO_ALL_NON_PERSPECTIVE_FLAGS));
1388 cl_emit(&job->bcl, ZERO_ALL_NON_PERSPECTIVE_FLAGS, flags);
1391 if (!emit_varying_flags(job, num_flags, centroid_flags,
1394 &job->bcl, cl_packet_length(ZERO_ALL_CENTROID_FLAGS));
1397 cl_emit(&job->bcl, ZERO_ALL_CENTROID_FLAGS, flags);
1401 /* Updates job early Z state tracking. Returns false if EZ must be disabled
1405 job_update_ez_state(struct v3dv_job *job,
1411 * this job. This will cause us to disable EZ for the entire job in the
1413 * we never emit a draw call in the job with EZ enabled in the CFG_BITS
1416 if (job->first_ez_state == V3D_EZ_DISABLED) {
1417 assert(job->ez_state == V3D_EZ_DISABLED);
1424 if (job->ez_state == V3D_EZ_DISABLED)
1432 /* If this is the first time we update EZ state for this job we first check
1434 * job (based on state that is not related to the current draw call and
1437 if (!job->decided_global_ez_enable) {
1438 job->decided_global_ez_enable = true;
1444 job->first_ez_state = V3D_EZ_DISABLED;
1445 job->ez_state = V3D_EZ_DISABLED;
1473 job->first_ez_state = V3D_EZ_DISABLED;
1474 job->ez_state = V3D_EZ_DISABLED;
1481 job->first_ez_state = V3D_EZ_DISABLED;
1482 job->ez_state = V3D_EZ_DISABLED;
1506 if (job->ez_state == V3D_EZ_UNDECIDED) {
1507 job->ez_state = pipeline->ez_state;
1508 } else if (job->ez_state != pipeline->ez_state) {
1520 if (job->first_ez_state == V3D_EZ_UNDECIDED && !disable_ez) {
1521 assert(job->ez_state != V3D_EZ_DISABLED);
1522 job->first_ez_state = job->ez_state;
1531 job->ez_state = V3D_EZ_DISABLED;
1535 job->has_ez_draws = true;
1543 struct v3dv_job *job = cmd_buffer->state.job;
1544 assert(job);
1549 bool enable_ez = job_update_ez_state(job, pipeline, cmd_buffer);
1551 v3dv_cl_ensure_space_with_branch(&job->bcl, cl_packet_length(CFG_BITS));
1554 cl_emit_with_prepacked(&job->bcl, CFG_BITS, pipeline->cfg_bits, config) {
1564 struct v3dv_job *job = cmd_buffer->state.job;
1565 assert(job);
1567 v3dv_cl_ensure_space_with_branch(&job->bcl,
1571 cl_emit(&job->bcl, OCCLUSION_QUERY_COUNTER, counter) {
1588 struct v3dv_job *job =
1591 if (!job)
1595 job->serialize = V3DV_BARRIER_ALL;
1596 job->needs_bcl_sync = is_bcl_barrier;
1597 return job;
1633 assert(primary->state.job);
1643 /* FIXME: if our primary job tiling doesn't enable MSAA but any of the
1645 * job to enable MSAA. See cmd_buffer_restart_job_for_msaa_if_needed.
1657 /* If the job is a CL, then we branch to it from the primary BCL.
1673 * need to split the primary to create a new job that can consume
1677 * the RETURN_FROM_SUB_LIST into the primary job to skip the
1680 struct v3dv_job *primary_job = primary->state.job;
1700 /* Make sure our primary job has all required BO references */
1721 /* This is a regular job (CPU or GPU), so just finish the current
1722 * primary job (if any) and then add the secondary job to the
1723 * primary's job list right after it.
1746 * current primary job is finished.
1767 emit_gs_shader_state_record(struct v3dv_job *job,
1774 cl_emit(&job->indirect, GEOMETRY_SHADER_STATE_RECORD, shader) {
1813 emit_tes_gs_common_params(struct v3dv_job *job,
1817 cl_emit(&job->indirect, TESSELLATION_GEOMETRY_COMMON_PARAMS, shader) {
1848 emit_tes_gs_shader_params(struct v3dv_job *job,
1853 cl_emit(&job->indirect, TESSELLATION_GEOMETRY_SHADER_PARAMS, shader) {
1880 struct v3dv_job *job = cmd_buffer->state.job;
1881 assert(job);
1914 job->tmu_dirty_rcl |= prog_data_vs_bin->base.tmu_dirty_rcl;
1915 job->tmu_dirty_rcl |= prog_data_vs->base.tmu_dirty_rcl;
1916 job->tmu_dirty_rcl |= prog_data_fs->base.tmu_dirty_rcl;
1918 job->tmu_dirty_rcl |= prog_data_gs_bin->base.tmu_dirty_rcl;
1919 job->tmu_dirty_rcl |= prog_data_gs->base.tmu_dirty_rcl;
1935 v3dv_cl_ensure_space(&job->indirect,
1945 emit_gs_shader_state_record(job,
1952 emit_tes_gs_common_params(job,
1956 emit_tes_gs_shader_params(job,
1961 emit_tes_gs_shader_params(job,
1972 cl_emit_with_prepacked(&job->indirect, GL_SHADER_STATE_RECORD,
2029 cl_emit_with_prepacked(&job->indirect, GL_SHADER_STATE_ATTRIBUTE_RECORD,
2073 cl_emit(&job->indirect, GL_SHADER_STATE_ATTRIBUTE_RECORD, attr) {
2075 attr.address = v3dv_cl_address(job->indirect.bo, 0);
2087 v3dv_cl_ensure_space_with_branch(&job->bcl,
2091 cl_emit_prepacked(&job->bcl, &pipeline->vcm_cache_size);
2094 v3dv_cl_ensure_space_with_branch(&job->bcl,
2099 cl_emit(&job->bcl, GL_SHADER_STATE_INCLUDING_GS, state) {
2100 state.address = v3dv_cl_address(job->indirect.bo, shader_rec_offset);
2104 cl_emit(&job->bcl, GL_SHADER_STATE, state) {
2105 state.address = v3dv_cl_address(job->indirect.bo, shader_rec_offset);
2126 struct v3dv_job *job = cmd_buffer->state.job;
2127 assert(job);
2138 &job->bcl, cl_packet_length(BASE_VERTEX_BASE_INSTANCE));
2141 cl_emit(&job->bcl, BASE_VERTEX_BASE_INSTANCE, base) {
2149 &job->bcl, cl_packet_length(VERTEX_ARRAY_INSTANCED_PRIMS));
2152 cl_emit(&job->bcl, VERTEX_ARRAY_INSTANCED_PRIMS, prim) {
2160 &job->bcl, cl_packet_length(VERTEX_ARRAY_PRIMS));
2162 cl_emit(&job->bcl, VERTEX_ARRAY_PRIMS, prim) {
2173 struct v3dv_job *job = cmd_buffer->state.job;
2174 assert(job);
2176 /* We flag all state as dirty when we create a new job so make sure we
2183 &job->bcl, cl_packet_length(INDEX_BUFFER_SETUP));
2187 cl_emit(&job->bcl, INDEX_BUFFER_SETUP, ib) {
2205 struct v3dv_job *job = cmd_buffer->state.job;
2206 assert(job);
2215 &job->bcl, cl_packet_length(BASE_VERTEX_BASE_INSTANCE));
2218 cl_emit(&job->bcl, BASE_VERTEX_BASE_INSTANCE, base) {
2226 &job->bcl, cl_packet_length(INDEXED_PRIM_LIST));
2229 cl_emit(&job->bcl, INDEXED_PRIM_LIST, prim) {
2238 &job->bcl, cl_packet_length(INDEXED_INSTANCED_PRIM_LIST));
2241 cl_emit(&job->bcl, INDEXED_INSTANCED_PRIM_LIST, prim) {
2259 struct v3dv_job *job = cmd_buffer->state.job;
2260 assert(job);
2266 &job->bcl, cl_packet_length(INDIRECT_VERTEX_ARRAY_INSTANCED_PRIMS));
2269 cl_emit(&job->bcl, INDIRECT_VERTEX_ARRAY_INSTANCED_PRIMS, prim) {
2285 struct v3dv_job *job = cmd_buffer->state.job;
2286 assert(job);
2293 &job->bcl, cl_packet_length(INDIRECT_INDEXED_INSTANCED_PRIM_LIST));
2296 cl_emit(&job->bcl, INDIRECT_INDEXED_INSTANCED_PRIM_LIST, prim) {