/**************************************************************************
 *
 * Copyright 2009 Younes Manton.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include <assert.h>

#include "pipe/p_context.h"

#include "util/u_sampler.h"
#include "util/u_draw.h"

#include "tgsi/tgsi_ureg.h"

#include "vl_defines.h"
#include "vl_vertex_buffers.h"
#include "vl_mc.h"
#include "vl_idct.h"

enum VS_OUTPUT
{
   VS_O_VPOS = 0,
   VS_O_VTOP = 0,
   VS_O_VBOTTOM,

   VS_O_FLAGS = VS_O_VTOP,
   VS_O_VTEX = VS_O_VBOTTOM
};

static struct ureg_dst
calc_position(struct vl_mc *r, struct ureg_program *shader, struct ureg_src block_scale)
{
   struct ureg_src vrect, vpos;
   struct ureg_dst t_vpos;
   struct ureg_dst o_vpos;

   vrect = ureg_DECL_vs_input(shader, VS_I_RECT);
   vpos = ureg_DECL_vs_input(shader, VS_I_VPOS);

   t_vpos = ureg_DECL_temporary(shader);

   o_vpos = ureg_DECL_output(shader, TGSI_SEMANTIC_POSITION, VS_O_VPOS);

   /*
    * block_scale = (VL_MACROBLOCK_WIDTH, VL_MACROBLOCK_HEIGHT) / (dst.width, dst.height)
    *
    * t_vpos = (vpos + vrect) * block_scale
    * o_vpos.xy = t_vpos
    * o_vpos.zw = 1.0f
    */
   ureg_ADD(shader, ureg_writemask(t_vpos, TGSI_WRITEMASK_XY), vpos, vrect);
   ureg_MUL(shader, ureg_writemask(t_vpos, TGSI_WRITEMASK_XY), ureg_src(t_vpos), block_scale);
   ureg_MOV(shader, ureg_writemask(o_vpos, TGSI_WRITEMASK_XY), ureg_src(t_vpos));
   ureg_MOV(shader, ureg_writemask(o_vpos, TGSI_WRITEMASK_ZW), ureg_imm1f(shader, 1.0f));

   return t_vpos;
}
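
/*
 * calc_line() derives the field parity of the current fragment row from the
 * window position: tmp.y ends up as 0 for even (top field) lines and 1 for
 * odd (bottom field) lines.  As a worked example, assuming half-pixel
 * centers (see rs_state.half_pixel_center below): pos.y = 2.5 gives
 * frc(2.5 * 0.5) = 0.25 -> 0, while pos.y = 3.5 gives frc(3.5 * 0.5) = 0.75 -> 1.
 */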
static struct ureg_dst
calc_line(struct pipe_screen *screen, struct ureg_program *shader)
{
   struct ureg_dst tmp;
   struct ureg_src pos;

   tmp = ureg_DECL_temporary(shader);

   if (screen->get_param(screen, PIPE_CAP_FS_POSITION_IS_SYSVAL))
      pos = ureg_DECL_system_value(shader, TGSI_SEMANTIC_POSITION, 0);
   else
      pos = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_POSITION, VS_O_VPOS,
                               TGSI_INTERPOLATE_LINEAR);

   /*
    * tmp.y = fraction(pos.y / 2) >= 0.5 ? 1 : 0
    */
   ureg_MUL(shader, ureg_writemask(tmp, TGSI_WRITEMASK_Y), pos, ureg_imm1f(shader, 0.5f));
   ureg_FRC(shader, ureg_writemask(tmp, TGSI_WRITEMASK_Y), ureg_src(tmp));
   ureg_SGE(shader, ureg_writemask(tmp, TGSI_WRITEMASK_Y), ureg_src(tmp), ureg_imm1f(shader, 0.5f));

   return tmp;
}

static void *
create_ref_vert_shader(struct vl_mc *r)
{
   struct ureg_program *shader;
   struct ureg_src mv_scale;
   struct ureg_src vmv[2];
   struct ureg_dst t_vpos;
   struct ureg_dst o_vmv[2];
   unsigned i;

   shader = ureg_create(PIPE_SHADER_VERTEX);
   if (!shader)
      return NULL;

   vmv[0] = ureg_DECL_vs_input(shader, VS_I_MV_TOP);
   vmv[1] = ureg_DECL_vs_input(shader, VS_I_MV_BOTTOM);

   t_vpos = calc_position(r, shader, ureg_imm2f(shader,
      (float)VL_MACROBLOCK_WIDTH / r->buffer_width,
      (float)VL_MACROBLOCK_HEIGHT / r->buffer_height));

   o_vmv[0] = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTOP);
   o_vmv[1] = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_VBOTTOM);

   /*
    * mv_scale.xy = 0.5 / (dst.width, dst.height);
    * mv_scale.z = 1.0f / 4.0f
    * mv_scale.w = 1.0f / PIPE_VIDEO_MV_WEIGHT_MAX
    *
    * // Apply motion vectors
    * o_vmv[0..1].xy = vmv[0..1] * mv_scale + t_vpos
    * o_vmv[0..1].zw = vmv[0..1] * mv_scale
    */

   mv_scale = ureg_imm4f(shader,
      0.5f / r->buffer_width,
      0.5f / r->buffer_height,
      1.0f / 4.0f,
      1.0f / PIPE_VIDEO_MV_WEIGHT_MAX);

   for (i = 0; i < 2; ++i) {
      ureg_MAD(shader, ureg_writemask(o_vmv[i], TGSI_WRITEMASK_XY), mv_scale, vmv[i], ureg_src(t_vpos));
      ureg_MUL(shader, ureg_writemask(o_vmv[i], TGSI_WRITEMASK_ZW), mv_scale, vmv[i]);
   }

   ureg_release_temporary(shader, t_vpos);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, r->pipe);
}
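
/*
 * Fragment shader that fetches the motion compensated prediction from a
 * reference picture.  A sketch of the idea: the per-line parity from
 * calc_line() selects between the top and bottom field texture coordinates,
 * and when the motion vector carries a field offset (ref.z != 0) the y
 * coordinate is floored to a line boundary and offset by ref.z before
 * sampling.  As a hypothetical numeric example, with buffer_height = 480 and
 * macroblock_size equal to VL_MACROBLOCK_HEIGHT, y_scale below evaluates to
 * 480 / 2 = 240.
 */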
static void *
create_ref_frag_shader(struct vl_mc *r)
{
   const float y_scale =
      r->buffer_height / 2 *
      r->macroblock_size / VL_MACROBLOCK_HEIGHT;

   struct ureg_program *shader;
   struct ureg_src tc[2], sampler;
   struct ureg_dst ref, field;
   struct ureg_dst fragment;
   unsigned label;

   shader = ureg_create(PIPE_SHADER_FRAGMENT);
   if (!shader)
      return NULL;

   tc[0] = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTOP, TGSI_INTERPOLATE_LINEAR);
   tc[1] = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VBOTTOM, TGSI_INTERPOLATE_LINEAR);

   sampler = ureg_DECL_sampler(shader, 0);
   ref = ureg_DECL_temporary(shader);

   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   field = calc_line(r->pipe->screen, shader);

   /*
    * ref = field.y ? tc[1] : tc[0]
    *
    * // Adjust tc according to top/bottom field selection
    * if (|ref.z|) {
    *    ref.y *= y_scale
    *    ref.y = floor(ref.y)
    *    ref.y += ref.z
    *    ref.y /= y_scale
    * }
    * fragment.xyz = tex(ref, sampler[0])
    */
   ureg_CMP(shader, ureg_writemask(ref, TGSI_WRITEMASK_XYZ),
            ureg_negate(ureg_scalar(ureg_src(field), TGSI_SWIZZLE_Y)),
            tc[1], tc[0]);
   ureg_CMP(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W),
            ureg_negate(ureg_scalar(ureg_src(field), TGSI_SWIZZLE_Y)),
            tc[1], tc[0]);

   ureg_IF(shader, ureg_scalar(ureg_src(ref), TGSI_SWIZZLE_Z), &label);

      ureg_MUL(shader, ureg_writemask(ref, TGSI_WRITEMASK_Y),
               ureg_src(ref), ureg_imm1f(shader, y_scale));
      ureg_FLR(shader, ureg_writemask(ref, TGSI_WRITEMASK_Y), ureg_src(ref));
      ureg_ADD(shader, ureg_writemask(ref, TGSI_WRITEMASK_Y),
               ureg_src(ref), ureg_scalar(ureg_src(ref), TGSI_SWIZZLE_Z));
      ureg_MUL(shader, ureg_writemask(ref, TGSI_WRITEMASK_Y),
               ureg_src(ref), ureg_imm1f(shader, 1.0f / y_scale));

   ureg_fixup_label(shader, label, ureg_get_instruction_number(shader));
   ureg_ENDIF(shader);

   ureg_TEX(shader, ureg_writemask(fragment, TGSI_WRITEMASK_XYZ), TGSI_TEXTURE_2D, ureg_src(ref), sampler);

   ureg_release_temporary(shader, ref);
   ureg_release_temporary(shader, field);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, r->pipe);
}
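
/*
 * Vertex shader for rendering the YCbCr (IDCT residual) blocks.  Roughly:
 * besides positioning the block quad, it handles field-coded (interlaced)
 * macroblocks by stretching the quad one block height up or down so that it
 * spans the lines of both fields, and records the block's field parity in
 * o_flags.w for the fragment shader, which discards the lines belonging to
 * the other field.  o_flags.w stays at -1.0 for frame-coded blocks, so no
 * line is ever discarded in that case.
 */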
static void *
create_ycbcr_vert_shader(struct vl_mc *r, vl_mc_ycbcr_vert_shader vs_callback, void *callback_priv)
{
   struct ureg_program *shader;

   struct ureg_src vrect, vpos;
   struct ureg_dst t_vpos, t_vtex;
   struct ureg_dst o_vpos, o_flags;

   struct vertex2f scale = {
      (float)VL_BLOCK_WIDTH / r->buffer_width * VL_MACROBLOCK_WIDTH / r->macroblock_size,
      (float)VL_BLOCK_HEIGHT / r->buffer_height * VL_MACROBLOCK_HEIGHT / r->macroblock_size
   };

   unsigned label;

   shader = ureg_create(PIPE_SHADER_VERTEX);
   if (!shader)
      return NULL;

   vrect = ureg_DECL_vs_input(shader, VS_I_RECT);
   vpos = ureg_DECL_vs_input(shader, VS_I_VPOS);

   t_vpos = calc_position(r, shader, ureg_imm2f(shader, scale.x, scale.y));
   t_vtex = ureg_DECL_temporary(shader);

   o_vpos = ureg_DECL_output(shader, TGSI_SEMANTIC_POSITION, VS_O_VPOS);
   o_flags = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_FLAGS);

   /*
    * o_vtex.xy = t_vpos
    * o_flags.z = intra * 0.5
    *
    * if (interlaced) {
    *    t_vtex.xy = vrect.y ? { 0, scale.y } : { -scale.y, 0 }
    *    t_vtex.z = vpos.y % 2
    *    t_vtex.y = t_vtex.z ? t_vtex.x : t_vtex.y
    *    o_vpos.y = t_vtex.y + t_vpos.y
    *
    *    o_flags.w = t_vtex.z ? 0 : 1
    * }
    */

   vs_callback(callback_priv, r, shader, VS_O_VTEX, t_vpos);

   ureg_MUL(shader, ureg_writemask(o_flags, TGSI_WRITEMASK_Z),
            ureg_scalar(vpos, TGSI_SWIZZLE_Z), ureg_imm1f(shader, 0.5f));
   ureg_MOV(shader, ureg_writemask(o_flags, TGSI_WRITEMASK_W), ureg_imm1f(shader, -1.0f));

   if (r->macroblock_size == VL_MACROBLOCK_HEIGHT) { //TODO
      ureg_IF(shader, ureg_scalar(vpos, TGSI_SWIZZLE_W), &label);

         ureg_CMP(shader, ureg_writemask(t_vtex, TGSI_WRITEMASK_XY),
                  ureg_negate(ureg_scalar(vrect, TGSI_SWIZZLE_Y)),
                  ureg_imm2f(shader, 0.0f, scale.y),
                  ureg_imm2f(shader, -scale.y, 0.0f));
         ureg_MUL(shader, ureg_writemask(t_vtex, TGSI_WRITEMASK_Z),
                  ureg_scalar(vpos, TGSI_SWIZZLE_Y), ureg_imm1f(shader, 0.5f));

         ureg_FRC(shader, ureg_writemask(t_vtex, TGSI_WRITEMASK_Z), ureg_src(t_vtex));

         ureg_CMP(shader, ureg_writemask(t_vtex, TGSI_WRITEMASK_Y),
                  ureg_negate(ureg_scalar(ureg_src(t_vtex), TGSI_SWIZZLE_Z)),
                  ureg_scalar(ureg_src(t_vtex), TGSI_SWIZZLE_X),
                  ureg_scalar(ureg_src(t_vtex), TGSI_SWIZZLE_Y));
         ureg_ADD(shader, ureg_writemask(o_vpos, TGSI_WRITEMASK_Y),
                  ureg_src(t_vpos), ureg_src(t_vtex));

         ureg_CMP(shader, ureg_writemask(o_flags, TGSI_WRITEMASK_W),
                  ureg_negate(ureg_scalar(ureg_src(t_vtex), TGSI_SWIZZLE_Z)),
                  ureg_imm1f(shader, 0.0f), ureg_imm1f(shader, 1.0f));

      ureg_fixup_label(shader, label, ureg_get_instruction_number(shader));
      ureg_ENDIF(shader);
   }

   ureg_release_temporary(shader, t_vtex);
   ureg_release_temporary(shader, t_vpos);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, r->pipe);
}

static void *
create_ycbcr_frag_shader(struct vl_mc *r, float scale, bool invert,
                         vl_mc_ycbcr_frag_shader fs_callback, void *callback_priv)
{
   struct ureg_program *shader;
   struct ureg_src flags;
   struct ureg_dst tmp;
   struct ureg_dst fragment;
   unsigned label;

   shader = ureg_create(PIPE_SHADER_FRAGMENT);
   if (!shader)
      return NULL;

   flags = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_FLAGS, TGSI_INTERPOLATE_LINEAR);

   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   tmp = calc_line(r->pipe->screen, shader);

   /*
    * if (field == flags.w)
    *    kill();
    * else {
    *    fragment.xyz = tex(tc, sampler) * scale + flags.z
    *    fragment.w = 1.0f
    * }
    */

   ureg_SEQ(shader, ureg_writemask(tmp, TGSI_WRITEMASK_Y),
            ureg_scalar(flags, TGSI_SWIZZLE_W), ureg_src(tmp));

   ureg_IF(shader, ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_Y), &label);

      ureg_KILL(shader);

   ureg_fixup_label(shader, label, ureg_get_instruction_number(shader));
   ureg_ELSE(shader, &label);

      fs_callback(callback_priv, r, shader, VS_O_VTEX, tmp);

      if (scale != 1.0f)
         ureg_MAD(shader, ureg_writemask(tmp, TGSI_WRITEMASK_XYZ),
                  ureg_src(tmp), ureg_imm1f(shader, scale),
                  ureg_scalar(flags, TGSI_SWIZZLE_Z));
      else
         ureg_ADD(shader, ureg_writemask(tmp, TGSI_WRITEMASK_XYZ),
                  ureg_src(tmp), ureg_scalar(flags, TGSI_SWIZZLE_Z));

      ureg_MUL(shader, ureg_writemask(fragment, TGSI_WRITEMASK_XYZ),
               ureg_src(tmp), ureg_imm1f(shader, invert ? -1.0f : 1.0f));
      ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));

   ureg_fixup_label(shader, label, ureg_get_instruction_number(shader));
   ureg_ENDIF(shader);

   ureg_release_temporary(shader, tmp);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, r->pipe);
}
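
/*
 * A short note on the blend states created below: the renderer keeps three
 * families of blenders, each indexed by a color writemask so that a single
 * Y, Cb or Cr component can be written on its own:
 *
 *   blend_clear[mask]   dst = src * src.a          (first write to a surface)
 *   blend_add[mask]     dst = src * src.a + dst    (accumulate)
 *   blend_sub[mask]     dst = dst - src * src.a    (reverse subtract)
 *
 * vl_mc_render_ref() and vl_mc_render_ycbcr() pick the variant depending on
 * whether the surface has already been written this frame.
 */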
static bool
init_pipe_state(struct vl_mc *r)
{
   struct pipe_sampler_state sampler;
   struct pipe_blend_state blend;
   struct pipe_rasterizer_state rs_state;
   unsigned i;

   assert(r);

   memset(&sampler, 0, sizeof(sampler));
   sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_BORDER;
   sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.compare_mode = PIPE_TEX_COMPARE_NONE;
   sampler.compare_func = PIPE_FUNC_ALWAYS;
   sampler.normalized_coords = 1;
   r->sampler_ref = r->pipe->create_sampler_state(r->pipe, &sampler);
   if (!r->sampler_ref)
      goto error_sampler_ref;

   for (i = 0; i < VL_MC_NUM_BLENDERS; ++i) {
      memset(&blend, 0, sizeof blend);
      blend.independent_blend_enable = 0;
      blend.rt[0].blend_enable = 1;
      blend.rt[0].rgb_func = PIPE_BLEND_ADD;
      blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
      blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_ZERO;
      blend.rt[0].alpha_func = PIPE_BLEND_ADD;
      blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
      blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ZERO;
      blend.logicop_enable = 0;
      blend.logicop_func = PIPE_LOGICOP_CLEAR;
      blend.rt[0].colormask = i;
      blend.dither = 0;
      r->blend_clear[i] = r->pipe->create_blend_state(r->pipe, &blend);
      if (!r->blend_clear[i])
         goto error_blend;

      blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_ONE;
      blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
      r->blend_add[i] = r->pipe->create_blend_state(r->pipe, &blend);
      if (!r->blend_add[i])
         goto error_blend;

      blend.rt[0].rgb_func = PIPE_BLEND_REVERSE_SUBTRACT;
      blend.rt[0].alpha_func = PIPE_BLEND_REVERSE_SUBTRACT;
      r->blend_sub[i] = r->pipe->create_blend_state(r->pipe, &blend);
      if (!r->blend_sub[i])
         goto error_blend;
   }

   memset(&rs_state, 0, sizeof(rs_state));
   /*rs_state.sprite_coord_enable */
   rs_state.sprite_coord_mode = PIPE_SPRITE_COORD_UPPER_LEFT;
   rs_state.point_quad_rasterization = true;
   rs_state.point_size = VL_BLOCK_WIDTH;
   rs_state.half_pixel_center = true;
   rs_state.bottom_edge_rule = true;
   rs_state.depth_clip_near = 1;
   rs_state.depth_clip_far = 1;

   r->rs_state = r->pipe->create_rasterizer_state(r->pipe, &rs_state);
   if (!r->rs_state)
      goto error_rs_state;

   return true;

error_rs_state:
error_blend:
   for (i = 0; i < VL_MC_NUM_BLENDERS; ++i) {
      if (r->blend_sub[i])
         r->pipe->delete_blend_state(r->pipe, r->blend_sub[i]);

      if (r->blend_add[i])
         r->pipe->delete_blend_state(r->pipe, r->blend_add[i]);

      if (r->blend_clear[i])
         r->pipe->delete_blend_state(r->pipe, r->blend_clear[i]);
   }

   r->pipe->delete_sampler_state(r->pipe, r->sampler_ref);

error_sampler_ref:
   return false;
}
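
/* Counterpart to init_pipe_state(): releases the sampler, the three blender
 * families and the rasterizer state. */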
static void
cleanup_pipe_state(struct vl_mc *r)
{
   unsigned i;

   assert(r);

   r->pipe->delete_sampler_state(r->pipe, r->sampler_ref);
   for (i = 0; i < VL_MC_NUM_BLENDERS; ++i) {
      r->pipe->delete_blend_state(r->pipe, r->blend_clear[i]);
      r->pipe->delete_blend_state(r->pipe, r->blend_add[i]);
      r->pipe->delete_blend_state(r->pipe, r->blend_sub[i]);
   }
   r->pipe->delete_rasterizer_state(r->pipe, r->rs_state);
}

bool
vl_mc_init(struct vl_mc *renderer, struct pipe_context *pipe,
           unsigned buffer_width, unsigned buffer_height,
           unsigned macroblock_size, float scale,
           vl_mc_ycbcr_vert_shader vs_callback,
           vl_mc_ycbcr_frag_shader fs_callback,
           void *callback_priv)
{
   assert(renderer);
   assert(pipe);

   memset(renderer, 0, sizeof(struct vl_mc));

   renderer->pipe = pipe;
   renderer->buffer_width = buffer_width;
   renderer->buffer_height = buffer_height;
   renderer->macroblock_size = macroblock_size;

   if (!init_pipe_state(renderer))
      goto error_pipe_state;

   renderer->vs_ref = create_ref_vert_shader(renderer);
   if (!renderer->vs_ref)
      goto error_vs_ref;

   renderer->vs_ycbcr = create_ycbcr_vert_shader(renderer, vs_callback, callback_priv);
   if (!renderer->vs_ycbcr)
      goto error_vs_ycbcr;

   renderer->fs_ref = create_ref_frag_shader(renderer);
   if (!renderer->fs_ref)
      goto error_fs_ref;

   renderer->fs_ycbcr = create_ycbcr_frag_shader(renderer, scale, false, fs_callback, callback_priv);
   if (!renderer->fs_ycbcr)
      goto error_fs_ycbcr;

   renderer->fs_ycbcr_sub = create_ycbcr_frag_shader(renderer, scale, true, fs_callback, callback_priv);
   if (!renderer->fs_ycbcr_sub)
      goto error_fs_ycbcr_sub;

   return true;

error_fs_ycbcr_sub:
   renderer->pipe->delete_fs_state(renderer->pipe, renderer->fs_ycbcr);

error_fs_ycbcr:
   renderer->pipe->delete_fs_state(renderer->pipe, renderer->fs_ref);

error_fs_ref:
   renderer->pipe->delete_vs_state(renderer->pipe, renderer->vs_ycbcr);

error_vs_ycbcr:
   renderer->pipe->delete_vs_state(renderer->pipe, renderer->vs_ref);

error_vs_ref:
   cleanup_pipe_state(renderer);

error_pipe_state:
   return false;
}

void
vl_mc_cleanup(struct vl_mc *renderer)
{
   assert(renderer);

   cleanup_pipe_state(renderer);

   renderer->pipe->delete_vs_state(renderer->pipe, renderer->vs_ref);
   renderer->pipe->delete_vs_state(renderer->pipe, renderer->vs_ycbcr);
   renderer->pipe->delete_fs_state(renderer->pipe, renderer->fs_ref);
   renderer->pipe->delete_fs_state(renderer->pipe, renderer->fs_ycbcr);
   renderer->pipe->delete_fs_state(renderer->pipe, renderer->fs_ycbcr_sub);
}

bool
vl_mc_init_buffer(struct vl_mc *renderer, struct vl_mc_buffer *buffer)
{
   assert(renderer && buffer);

   buffer->viewport.scale[2] = 1;
   buffer->viewport.translate[0] = 0;
   buffer->viewport.translate[1] = 0;
   buffer->viewport.translate[2] = 0;
   buffer->viewport.swizzle_x = PIPE_VIEWPORT_SWIZZLE_POSITIVE_X;
   buffer->viewport.swizzle_y = PIPE_VIEWPORT_SWIZZLE_POSITIVE_Y;
   buffer->viewport.swizzle_z = PIPE_VIEWPORT_SWIZZLE_POSITIVE_Z;
   buffer->viewport.swizzle_w = PIPE_VIEWPORT_SWIZZLE_POSITIVE_W;

   buffer->fb_state.nr_cbufs = 1;
   buffer->fb_state.zsbuf = NULL;

   return true;
}

void
vl_mc_cleanup_buffer(struct vl_mc_buffer *buffer)
{
   assert(buffer);
}
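
/*
 * Expected per-surface usage (a sketch; the exact sequence is driven by the
 * caller, e.g. a decoder built on top of vl_mc, which also binds the vertex
 * buffers carrying block positions and motion vectors):
 *
 *   vl_mc_set_surface(&buf, surface);
 *   vl_mc_render_ref(&mc, &buf, ref_sampler_view);    // motion compensated prediction
 *   vl_mc_render_ycbcr(&mc, &buf, component, n);      // residual, once per component
 *
 * The first reference render marks the surface as cleared, switching later
 * passes from the "clear" to the "add"/"sub" blenders.
 */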
void
vl_mc_set_surface(struct vl_mc_buffer *buffer, struct pipe_surface *surface)
{
   assert(buffer && surface);

   buffer->surface_cleared = false;

   buffer->viewport.scale[0] = surface->width;
   buffer->viewport.scale[1] = surface->height;

   buffer->fb_state.width = surface->width;
   buffer->fb_state.height = surface->height;
   buffer->fb_state.cbufs[0] = surface;
}

static void
prepare_pipe_4_rendering(struct vl_mc *renderer, struct vl_mc_buffer *buffer, unsigned mask)
{
   assert(buffer);

   renderer->pipe->bind_rasterizer_state(renderer->pipe, renderer->rs_state);

   if (buffer->surface_cleared)
      renderer->pipe->bind_blend_state(renderer->pipe, renderer->blend_add[mask]);
   else
      renderer->pipe->bind_blend_state(renderer->pipe, renderer->blend_clear[mask]);

   renderer->pipe->set_framebuffer_state(renderer->pipe, &buffer->fb_state);
   renderer->pipe->set_viewport_states(renderer->pipe, 0, 1, &buffer->viewport);
}

void
vl_mc_render_ref(struct vl_mc *renderer, struct vl_mc_buffer *buffer, struct pipe_sampler_view *ref)
{
   assert(buffer && ref);

   prepare_pipe_4_rendering(renderer, buffer, PIPE_MASK_R | PIPE_MASK_G | PIPE_MASK_B);

   renderer->pipe->bind_vs_state(renderer->pipe, renderer->vs_ref);
   renderer->pipe->bind_fs_state(renderer->pipe, renderer->fs_ref);

   renderer->pipe->set_sampler_views(renderer->pipe, PIPE_SHADER_FRAGMENT,
                                     0, 1, 0, false, &ref);
   renderer->pipe->bind_sampler_states(renderer->pipe, PIPE_SHADER_FRAGMENT,
                                       0, 1, &renderer->sampler_ref);

   util_draw_arrays_instanced(renderer->pipe, PIPE_PRIM_QUADS, 0, 4, 0,
                              renderer->buffer_width / VL_MACROBLOCK_WIDTH *
                              renderer->buffer_height / VL_MACROBLOCK_HEIGHT);

   buffer->surface_cleared = true;
}

void
vl_mc_render_ycbcr(struct vl_mc *renderer, struct vl_mc_buffer *buffer, unsigned component, unsigned num_instances)
{
   unsigned mask = 1 << component;

   assert(buffer);

   if (num_instances == 0)
      return;

   prepare_pipe_4_rendering(renderer, buffer, mask);

   renderer->pipe->bind_vs_state(renderer->pipe, renderer->vs_ycbcr);
   renderer->pipe->bind_fs_state(renderer->pipe, renderer->fs_ycbcr);

   util_draw_arrays_instanced(renderer->pipe, PIPE_PRIM_QUADS, 0, 4, 0, num_instances);

   if (buffer->surface_cleared) {
      renderer->pipe->bind_blend_state(renderer->pipe, renderer->blend_sub[mask]);
      renderer->pipe->bind_fs_state(renderer->pipe, renderer->fs_ycbcr_sub);
      util_draw_arrays_instanced(renderer->pipe, PIPE_PRIM_QUADS, 0, 4, 0, num_instances);
   }
}