/*
 * - CrystalHD decoder module -
 *
 * Copyright(C) 2010,2011 Philip Langdale <ffmpeg.philipl@overt.org>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/*
 * - Principles of Operation -
 *
 * The CrystalHD decoder operates at the bitstream level - which is an even
 * higher level than the decoding hardware you typically see in modern GPUs.
 * This means it has a very simple interface, in principle. You feed demuxed
 * packets in one end and get decoded pictures (fields/frames) out the other.
 *
 * Of course, nothing is ever that simple. Due, at the very least, to b-frame
 * dependencies in the supported formats, the hardware has a delay between
 * when a packet goes in and when a picture comes out. Furthermore, this delay
 * is not just a function of time, but also depends on additional frames being
 * fed into the decoder to satisfy the b-frame dependencies.
 *
 * As such, the hardware can only be used effectively with a decode API that
 * doesn't assume a 1:1 relationship between input packets and output frames.
 * The new avcodec decode API is such an API (an m:n API) while the old one is
 * 1:1. Consequently, we no longer support the old API, which allows us to avoid
 * the vicious hacks that are required to approximate 1:1 operation.
 */
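
/*
 * For illustration only: with the m:n decode API, a caller drives this
 * decoder roughly as follows (error handling omitted):
 *
 *     avcodec_send_packet(avctx, pkt);
 *     while (avcodec_receive_frame(avctx, frame) == 0) {
 *         // consume frame
 *     }
 *
 * so a given packet may produce zero, one or several frames, and a frame
 * may only appear after several further packets have been submitted.
 */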

/*****************************************************************************
 * Includes
 ****************************************************************************/

#include "config_components.h"

#define _XOPEN_SOURCE 600
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>

#include <libcrystalhd/bc_dts_types.h>
#include <libcrystalhd/bc_dts_defs.h>
#include <libcrystalhd/libcrystalhd_if.h>

#include "avcodec.h"
#include "codec_internal.h"
#include "decode.h"
#include "internal.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"

#if HAVE_UNISTD_H
#include <unistd.h>
#endif

/** Timeout parameter passed to DtsProcOutput() in us */
#define OUTPUT_PROC_TIMEOUT 50
/** Step between fake timestamps passed to hardware in units of 100ns */
#define TIMESTAMP_UNIT 100000


/*****************************************************************************
 * Module private data
 ****************************************************************************/

typedef enum {
    RET_ERROR      = -1,
    RET_OK         = 0,
    RET_COPY_AGAIN = 1,
} CopyRet;

typedef struct OpaqueList {
    struct OpaqueList *next;
    uint64_t fake_timestamp;
    uint64_t reordered_opaque;
} OpaqueList;

typedef struct {
    AVClass *av_class;
    AVCodecContext *avctx;
    /* This packet coincides with AVCodecInternal.in_pkt
     * and is not owned by us. */
    AVPacket *pkt;
    HANDLE dev;

    uint8_t is_70012;
    uint8_t need_second_field;
    uint8_t draining;

    OpaqueList *head;
    OpaqueList *tail;

    /* Options */
    uint32_t sWidth;
} CHDContext;

static const AVOption options[] = {
    { "crystalhd_downscale_width",
      "Turn on downscaling to the specified width",
      offsetof(CHDContext, sWidth),
      AV_OPT_TYPE_INT, {.i64 = 0}, 0, UINT32_MAX,
      AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM, },
    { NULL, },
};
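
/*
 * Illustrative usage of the private option above, set like any other
 * decoder private option (names and value are just examples):
 *
 *     AVDictionary *opts = NULL;
 *     av_dict_set(&opts, "crystalhd_downscale_width", "1280", 0);
 *     avcodec_open2(avctx, codec, &opts);
 *
 * or, typically, as "-crystalhd_downscale_width 1280" passed as an input
 * option on the ffmpeg command line.
 */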


/*****************************************************************************
 * Helper functions
 ****************************************************************************/

static inline BC_MEDIA_SUBTYPE id2subtype(CHDContext *priv, enum AVCodecID id)
{
    switch (id) {
    case AV_CODEC_ID_MPEG4:
        return BC_MSUBTYPE_DIVX;
    case AV_CODEC_ID_MSMPEG4V3:
        return BC_MSUBTYPE_DIVX311;
    case AV_CODEC_ID_MPEG2VIDEO:
        return BC_MSUBTYPE_MPEG2VIDEO;
    case AV_CODEC_ID_VC1:
        return BC_MSUBTYPE_VC1;
    case AV_CODEC_ID_WMV3:
        return BC_MSUBTYPE_WMV3;
    case AV_CODEC_ID_H264:
        return BC_MSUBTYPE_H264;
    default:
        return BC_MSUBTYPE_INVALID;
    }
}

static inline void print_frame_info(CHDContext *priv, BC_DTS_PROC_OUT *output)
{
    av_log(priv->avctx, AV_LOG_TRACE, "\tYBuffSz: %u\n", output->YbuffSz);
    av_log(priv->avctx, AV_LOG_TRACE, "\tYBuffDoneSz: %u\n",
           output->YBuffDoneSz);
    av_log(priv->avctx, AV_LOG_TRACE, "\tUVBuffDoneSz: %u\n",
           output->UVBuffDoneSz);
    av_log(priv->avctx, AV_LOG_TRACE, "\tTimestamp: %"PRIu64"\n",
           output->PicInfo.timeStamp);
    av_log(priv->avctx, AV_LOG_TRACE, "\tPicture Number: %u\n",
           output->PicInfo.picture_number);
    av_log(priv->avctx, AV_LOG_TRACE, "\tWidth: %u\n",
           output->PicInfo.width);
    av_log(priv->avctx, AV_LOG_TRACE, "\tHeight: %u\n",
           output->PicInfo.height);
    av_log(priv->avctx, AV_LOG_TRACE, "\tChroma: 0x%03x\n",
           output->PicInfo.chroma_format);
    av_log(priv->avctx, AV_LOG_TRACE, "\tPulldown: %u\n",
           output->PicInfo.pulldown);
    av_log(priv->avctx, AV_LOG_TRACE, "\tFlags: 0x%08x\n",
           output->PicInfo.flags);
    av_log(priv->avctx, AV_LOG_TRACE, "\tFrame Rate/Res: %u\n",
           output->PicInfo.frame_rate);
    av_log(priv->avctx, AV_LOG_TRACE, "\tAspect Ratio: %u\n",
           output->PicInfo.aspect_ratio);
    av_log(priv->avctx, AV_LOG_TRACE, "\tColor Primaries: %u\n",
           output->PicInfo.colour_primaries);
    av_log(priv->avctx, AV_LOG_TRACE, "\tMetaData: %u\n",
           output->PicInfo.picture_meta_payload);
    av_log(priv->avctx, AV_LOG_TRACE, "\tSession Number: %u\n",
           output->PicInfo.sess_num);
    av_log(priv->avctx, AV_LOG_TRACE, "\tycom: %u\n",
           output->PicInfo.ycom);
    av_log(priv->avctx, AV_LOG_TRACE, "\tCustom Aspect: %u\n",
           output->PicInfo.custom_aspect_ratio_width_height);
    av_log(priv->avctx, AV_LOG_TRACE, "\tFrames to Drop: %u\n",
           output->PicInfo.n_drop);
    av_log(priv->avctx, AV_LOG_TRACE, "\tH264 Valid Fields: 0x%08x\n",
           output->PicInfo.other.h264.valid);
}


/*****************************************************************************
 * OpaqueList functions
 ****************************************************************************/

static uint64_t opaque_list_push(CHDContext *priv, uint64_t reordered_opaque)
{
    OpaqueList *newNode = av_mallocz(sizeof (OpaqueList));
    if (!newNode) {
        av_log(priv->avctx, AV_LOG_ERROR,
               "Unable to allocate new node in OpaqueList.\n");
        return 0;
    }
    if (!priv->head) {
        newNode->fake_timestamp = TIMESTAMP_UNIT;
        priv->head = newNode;
    } else {
        newNode->fake_timestamp = priv->tail->fake_timestamp + TIMESTAMP_UNIT;
        priv->tail->next = newNode;
    }
    priv->tail = newNode;
    newNode->reordered_opaque = reordered_opaque;

    return newNode->fake_timestamp;
}
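
/*
 * A worked example of the fake timestamp scheme used above: the first push
 * hands the hardware TIMESTAMP_UNIT (100000, i.e. 10ms in 100ns units), the
 * second 2 * TIMESTAMP_UNIT, and so on. Zero is never handed out, so it can
 * double as the error return of opaque_list_push() and as the "no timestamp"
 * value checked against output->PicInfo.timeStamp in copy_frame().
 */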

/*
 * The OpaqueList is built in decode order, while elements will be removed
 * in presentation order. If frames are reordered, this means we must be
 * able to remove elements that are not the first element.
 *
 * Returned node must be freed by caller.
 */
static OpaqueList *opaque_list_pop(CHDContext *priv, uint64_t fake_timestamp)
{
    OpaqueList *node = priv->head;

    if (!priv->head) {
        av_log(priv->avctx, AV_LOG_ERROR,
               "CrystalHD: Attempted to query non-existent timestamps.\n");
        return NULL;
    }

    /*
     * The first element is special-cased because we have to manipulate
     * the head pointer rather than the previous element in the list.
     */
    if (priv->head->fake_timestamp == fake_timestamp) {
        priv->head = node->next;

        /* If the popped node was the only element, the list is now empty. */
        if (!priv->head)
            priv->tail = NULL;

        node->next = NULL;
        return node;
    }

    /*
     * The list is processed at arm's length so that we have the
     * previous element available to rewrite its next pointer.
     */
    while (node->next) {
        OpaqueList *current = node->next;
        if (current->fake_timestamp == fake_timestamp) {
            node->next = current->next;

            if (!node->next)
                priv->tail = node;

            current->next = NULL;
            return current;
        } else {
            node = current;
        }
    }

    av_log(priv->avctx, AV_LOG_VERBOSE,
           "CrystalHD: Couldn't match fake_timestamp.\n");
    return NULL;
}


/*****************************************************************************
 * Video decoder API function definitions
 ****************************************************************************/

static void flush(AVCodecContext *avctx)
{
    CHDContext *priv = avctx->priv_data;

    priv->need_second_field = 0;
    priv->draining = 0;

    /* Flush mode 4 flushes all software and hardware buffers. */
    DtsFlushInput(priv->dev, 4);
}


static av_cold int uninit(AVCodecContext *avctx)
{
    CHDContext *priv = avctx->priv_data;
    HANDLE device;

    device = priv->dev;
    DtsStopDecoder(device);
    DtsCloseDecoder(device);
    DtsDeviceClose(device);

    if (priv->head) {
        OpaqueList *node = priv->head;
        while (node) {
            OpaqueList *next = node->next;
            av_free(node);
            node = next;
        }
    }

    return 0;
}

static av_cold int init(AVCodecContext *avctx)
{
    CHDContext* priv;
    BC_STATUS ret;
    BC_INFO_CRYSTAL version;
    BC_INPUT_FORMAT format = {
        .FGTEnable = FALSE,
        .Progressive = TRUE,
        .OptFlags = 0x80000000 | vdecFrameRate59_94 | 0x40,
        .width = avctx->width,
        .height = avctx->height,
    };

    BC_MEDIA_SUBTYPE subtype;

    uint32_t mode = DTS_PLAYBACK_MODE |
                    DTS_LOAD_FILE_PLAY_FW |
                    DTS_SKIP_TX_CHK_CPB |
                    DTS_PLAYBACK_DROP_RPT_MODE |
                    DTS_SINGLE_THREADED_MODE |
                    DTS_DFLT_RESOLUTION(vdecRESOLUTION_1080p23_976);

    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD Init for %s\n",
           avctx->codec->name);

    avctx->pix_fmt = AV_PIX_FMT_YUYV422;

    /* Initialize the library */
    priv = avctx->priv_data;
    priv->avctx = avctx;
    priv->pkt = avctx->internal->in_pkt;
    priv->draining = 0;

    subtype = id2subtype(priv, avctx->codec->id);
    switch (subtype) {
    case BC_MSUBTYPE_H264:
        format.startCodeSz = 4;
        // Fall-through
    case BC_MSUBTYPE_VC1:
    case BC_MSUBTYPE_WVC1:
    case BC_MSUBTYPE_WMV3:
    case BC_MSUBTYPE_WMVA:
    case BC_MSUBTYPE_MPEG2VIDEO:
    case BC_MSUBTYPE_DIVX:
    case BC_MSUBTYPE_DIVX311:
        format.pMetaData = avctx->extradata;
        format.metaDataSz = avctx->extradata_size;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: Unknown codec name\n");
        return AVERROR(EINVAL);
    }
    format.mSubtype = subtype;

    if (priv->sWidth) {
        format.bEnableScaling = 1;
        format.ScalingParams.sWidth = priv->sWidth;
    }

    /* Get a decoder instance */
    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: starting up\n");
    // Initialize the Link and Decoder devices
    ret = DtsDeviceOpen(&priv->dev, mode);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: DtsDeviceOpen failed\n");
        goto fail;
    }

    ret = DtsCrystalHDVersion(priv->dev, &version);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_VERBOSE,
               "CrystalHD: DtsCrystalHDVersion failed\n");
        goto fail;
    }
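    /*
     * Device id 0 identifies the original BCM70012, which pads its output
     * strides to 720/1280/1920 (see copy_frame()) and cannot handle
     * MPEG-4 ASP/DivX/Xvid.
     */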
    priv->is_70012 = version.device == 0;

    if (priv->is_70012 &&
        (subtype == BC_MSUBTYPE_DIVX || subtype == BC_MSUBTYPE_DIVX311)) {
        av_log(avctx, AV_LOG_VERBOSE,
               "CrystalHD: BCM70012 doesn't support MPEG4-ASP/DivX/Xvid\n");
        goto fail;
    }

    ret = DtsSetInputFormat(priv->dev, &format);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: SetInputFormat failed\n");
        goto fail;
    }

    ret = DtsOpenDecoder(priv->dev, BC_STREAM_TYPE_ES);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsOpenDecoder failed\n");
        goto fail;
    }

    ret = DtsSetColorSpace(priv->dev, OUTPUT_MODE422_YUY2);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsSetColorSpace failed\n");
        goto fail;
    }
    ret = DtsStartDecoder(priv->dev);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartDecoder failed\n");
        goto fail;
    }
    ret = DtsStartCapture(priv->dev);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartCapture failed\n");
        goto fail;
    }

    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Init complete.\n");

    return 0;

 fail:
    uninit(avctx);
    return -1;
}


static inline CopyRet copy_frame(AVCodecContext *avctx,
                                 BC_DTS_PROC_OUT *output,
                                 AVFrame *frame, int *got_frame)
{
    BC_STATUS ret;
    BC_DTS_STATUS decoder_status = { 0, };
    uint8_t interlaced;

    CHDContext *priv = avctx->priv_data;
    int64_t pkt_pts = AV_NOPTS_VALUE;

    uint8_t bottom_field = (output->PicInfo.flags & VDEC_FLAG_BOTTOMFIELD) ==
                           VDEC_FLAG_BOTTOMFIELD;
    uint8_t bottom_first = !!(output->PicInfo.flags & VDEC_FLAG_BOTTOM_FIRST);

    int width = output->PicInfo.width;
    int height = output->PicInfo.height;
    int bwidth;
    uint8_t *src = output->Ybuff;
    int sStride;
    uint8_t *dst;
    int dStride;

    if (output->PicInfo.timeStamp != 0) {
        OpaqueList *node = opaque_list_pop(priv, output->PicInfo.timeStamp);
        if (node) {
            pkt_pts = node->reordered_opaque;
            av_free(node);
        } else {
            /*
             * We will encounter a situation where a timestamp cannot be
             * popped if a second field is being returned. In this case,
             * each field has the same timestamp and the first one will
             * cause it to be popped. We'll avoid overwriting the valid
             * timestamp below.
             */
        }
        av_log(avctx, AV_LOG_VERBOSE, "output \"pts\": %"PRIu64"\n",
               output->PicInfo.timeStamp);
    }

    ret = DtsGetDriverStatus(priv->dev, &decoder_status);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR,
               "CrystalHD: GetDriverStatus failed: %u\n", ret);
        return RET_ERROR;
    }

    interlaced = output->PicInfo.flags & VDEC_FLAG_INTERLACED_SRC;

    av_log(avctx, AV_LOG_VERBOSE, "Interlaced state: %d\n",
           interlaced);

    priv->need_second_field = interlaced && !priv->need_second_field;

    if (!frame->data[0]) {
        if (ff_get_buffer(avctx, frame, 0) < 0)
            return RET_ERROR;
    }

    bwidth = av_image_get_linesize(avctx->pix_fmt, width, 0);
    if (bwidth < 0)
        return RET_ERROR;

    if (priv->is_70012) {
        int pStride;

        if (width <= 720)
            pStride = 720;
        else if (width <= 1280)
            pStride = 1280;
        else pStride = 1920;
        sStride = av_image_get_linesize(avctx->pix_fmt, pStride, 0);
        if (sStride < 0)
            return RET_ERROR;
    } else {
        sStride = bwidth;
    }

    dStride = frame->linesize[0];
    dst = frame->data[0];

    av_log(priv->avctx, AV_LOG_VERBOSE, "CrystalHD: Copying out frame\n");

    /*
     * The hardware doesn't return the first sample of a picture.
     * Ignoring why it behaves this way, it's better to copy the sample from
     * the second line, rather than the next sample across because the chroma
     * values should be correct (assuming the decoded video was 4:2:0, which
     * it was).
     */
    *((uint32_t *)src) = *((uint32_t *)(src + sStride));

    if (interlaced) {
        int dY = 0;
        int sY = 0;

        height /= 2;
        if (bottom_field) {
            av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: bottom field\n");
            dY = 1;
        } else {
            av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: top field\n");
            dY = 0;
        }

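        /*
         * Copy this field into every other line of the output picture:
         * dY advances by two per source line, starting at line 1 for a
         * bottom field and line 0 for a top field.
         */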
        for (sY = 0; sY < height; dY++, sY++) {
            memcpy(&(dst[dY * dStride]), &(src[sY * sStride]), bwidth);
            dY++;
        }
    } else {
        av_image_copy_plane(dst, dStride, src, sStride, bwidth, height);
    }

    frame->interlaced_frame = interlaced;
    if (interlaced)
        frame->top_field_first = !bottom_first;

    frame->pts = pkt_pts;

    frame->pkt_pos = -1;
    frame->pkt_duration = 0;
    frame->pkt_size = -1;

    if (!priv->need_second_field) {
        *got_frame = 1;
    } else {
        return RET_COPY_AGAIN;
    }

    return RET_OK;
}


static inline CopyRet receive_frame(AVCodecContext *avctx,
                                    AVFrame *frame, int *got_frame)
{
    BC_STATUS ret;
    BC_DTS_PROC_OUT output = {
        .PicInfo.width = avctx->width,
        .PicInfo.height = avctx->height,
    };
    CHDContext *priv = avctx->priv_data;
    HANDLE dev = priv->dev;

    *got_frame = 0;

    // Request decoded data from the driver
    ret = DtsProcOutputNoCopy(dev, OUTPUT_PROC_TIMEOUT, &output);
    if (ret == BC_STS_FMT_CHANGE) {
        av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Initial format change\n");
        avctx->width = output.PicInfo.width;
        avctx->height = output.PicInfo.height;
        switch (output.PicInfo.aspect_ratio) {
        case vdecAspectRatioSquare:
            avctx->sample_aspect_ratio = (AVRational) {  1,  1};
            break;
        case vdecAspectRatio12_11:
            avctx->sample_aspect_ratio = (AVRational) { 12, 11};
            break;
        case vdecAspectRatio10_11:
            avctx->sample_aspect_ratio = (AVRational) { 10, 11};
            break;
        case vdecAspectRatio16_11:
            avctx->sample_aspect_ratio = (AVRational) { 16, 11};
            break;
        case vdecAspectRatio40_33:
            avctx->sample_aspect_ratio = (AVRational) { 40, 33};
            break;
        case vdecAspectRatio24_11:
            avctx->sample_aspect_ratio = (AVRational) { 24, 11};
            break;
        case vdecAspectRatio20_11:
            avctx->sample_aspect_ratio = (AVRational) { 20, 11};
            break;
        case vdecAspectRatio32_11:
            avctx->sample_aspect_ratio = (AVRational) { 32, 11};
            break;
        case vdecAspectRatio80_33:
            avctx->sample_aspect_ratio = (AVRational) { 80, 33};
            break;
        case vdecAspectRatio18_11:
            avctx->sample_aspect_ratio = (AVRational) { 18, 11};
            break;
        case vdecAspectRatio15_11:
            avctx->sample_aspect_ratio = (AVRational) { 15, 11};
            break;
        case vdecAspectRatio64_33:
            avctx->sample_aspect_ratio = (AVRational) { 64, 33};
            break;
        case vdecAspectRatio160_99:
            avctx->sample_aspect_ratio = (AVRational) {160, 99};
            break;
        case vdecAspectRatio4_3:
            avctx->sample_aspect_ratio = (AVRational) {  4,  3};
            break;
        case vdecAspectRatio16_9:
            avctx->sample_aspect_ratio = (AVRational) { 16,  9};
            break;
        case vdecAspectRatio221_1:
            avctx->sample_aspect_ratio = (AVRational) {221,  1};
            break;
        }
        return RET_COPY_AGAIN;
    } else if (ret == BC_STS_SUCCESS) {
        int copy_ret = -1;
        if (output.PoutFlags & BC_POUT_FLAGS_PIB_VALID) {
            print_frame_info(priv, &output);

            copy_ret = copy_frame(avctx, &output, frame, got_frame);
        } else {
            /*
             * An invalid frame has been consumed.
             */
            av_log(avctx, AV_LOG_ERROR, "CrystalHD: ProcOutput succeeded with "
                                        "invalid PIB\n");
            copy_ret = RET_COPY_AGAIN;
        }
        DtsReleaseOutputBuffs(dev, NULL, FALSE);

        return copy_ret;
    } else if (ret == BC_STS_BUSY) {
        return RET_COPY_AGAIN;
    } else {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: ProcOutput failed %d\n", ret);
        return RET_ERROR;
    }
}

static int crystalhd_decode_packet(AVCodecContext *avctx, const AVPacket *avpkt)
{
    BC_STATUS bc_ret;
    CHDContext *priv = avctx->priv_data;
    HANDLE dev = priv->dev;
    int ret = 0;

    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: decode_packet\n");

    if (avpkt && avpkt->size) {
        uint64_t pts;

        /*
         * Despite being notionally opaque, either libcrystalhd or
         * the hardware itself will mangle pts values that are too
         * small or too large. The docs claim it should be in units
         * of 100ns. Given that we're nominally dealing with a black
         * box on both sides, any transform we do has no guarantee of
         * avoiding mangling so we need to build a mapping to values
         * we know will not be mangled.
         */
        pts = opaque_list_push(priv, avpkt->pts);
        if (!pts) {
            ret = AVERROR(ENOMEM);
            goto exit;
        }
        av_log(priv->avctx, AV_LOG_VERBOSE,
               "input \"pts\": %"PRIu64"\n", pts);
        bc_ret = DtsProcInput(dev, avpkt->data, avpkt->size, pts, 0);
        if (bc_ret == BC_STS_BUSY) {
            av_log(avctx, AV_LOG_WARNING,
                   "CrystalHD: ProcInput returned busy\n");
            ret = AVERROR(EAGAIN);
            goto exit;
        } else if (bc_ret != BC_STS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR,
                   "CrystalHD: ProcInput failed: %u\n", bc_ret);
            ret = -1;
            goto exit;
        }
    } else {
        av_log(avctx, AV_LOG_INFO, "CrystalHD: No more input data\n");
        priv->draining = 1;
        ret = AVERROR_EOF;
        goto exit;
    }
 exit:
    return ret;
}

static int crystalhd_receive_frame(AVCodecContext *avctx, AVFrame *frame)
{
    BC_STATUS bc_ret;
    BC_DTS_STATUS decoder_status = { 0, };
    CopyRet rec_ret;
    CHDContext *priv = avctx->priv_data;
    AVPacket *const pkt = priv->pkt;
    HANDLE dev = priv->dev;
    int got_frame = 0;
    int ret = 0;

    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: receive_frame\n");

    ret = ff_decode_get_packet(avctx, pkt);
    if (ret < 0 && ret != AVERROR_EOF) {
        return ret;
    }

    while (pkt->size > DtsTxFreeSize(dev)) {
        /*
         * Block until there is space in the buffer for the next packet.
         * We assume that the hardware will make forward progress at this
         * point, although in pathological cases that may not happen.
         */
        av_log(avctx, AV_LOG_TRACE, "CrystalHD: Waiting for space in input buffer\n");
    }

    ret = crystalhd_decode_packet(avctx, pkt);
    av_packet_unref(pkt);
    // The wait loop above should keep the input buffer from filling up,
    // so treat a busy indication here as an external error.
    if (ret == AVERROR(EAGAIN)) {
        ret = AVERROR_EXTERNAL;
    }
    if (ret < 0 && ret != AVERROR_EOF) {
        return ret;
    }

    do {
        bc_ret = DtsGetDriverStatus(dev, &decoder_status);
        if (bc_ret != BC_STS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "CrystalHD: GetDriverStatus failed\n");
            return -1;
        }

        if (decoder_status.ReadyListCount == 0) {
            av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Insufficient frames ready. Returning\n");
            got_frame = 0;
            rec_ret = RET_OK;
            break;
        }

        rec_ret = receive_frame(avctx, frame, &got_frame);
    } while (rec_ret == RET_COPY_AGAIN);

    if (rec_ret == RET_ERROR) {
        return -1;
    } else if (got_frame == 0) {
        return priv->draining ? AVERROR_EOF : AVERROR(EAGAIN);
    } else {
        return 0;
    }
}

#define DEFINE_CRYSTALHD_DECODER(x, X, bsf_name) \
    static const AVClass x##_crystalhd_class = { \
        .class_name = #x "_crystalhd", \
        .item_name = av_default_item_name, \
        .option = options, \
        .version = LIBAVUTIL_VERSION_INT, \
    }; \
    const FFCodec ff_##x##_crystalhd_decoder = { \
        .p.name = #x "_crystalhd", \
        .p.long_name = NULL_IF_CONFIG_SMALL("CrystalHD " #X " decoder"), \
        .p.type = AVMEDIA_TYPE_VIDEO, \
        .p.id = AV_CODEC_ID_##X, \
        .priv_data_size = sizeof(CHDContext), \
        .p.priv_class = &x##_crystalhd_class, \
        .init = init, \
        .close = uninit, \
        FF_CODEC_RECEIVE_FRAME_CB(crystalhd_receive_frame), \
        .flush = flush, \
        .bsfs = bsf_name, \
        .p.capabilities = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AVOID_PROBING | AV_CODEC_CAP_HARDWARE, \
        .caps_internal = FF_CODEC_CAP_SETS_FRAME_PROPS, \
        .p.pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE}, \
        .p.wrapper_name = "crystalhd", \
    };

#if CONFIG_H264_CRYSTALHD_DECODER
DEFINE_CRYSTALHD_DECODER(h264, H264, "h264_mp4toannexb")
#endif

#if CONFIG_MPEG2_CRYSTALHD_DECODER
DEFINE_CRYSTALHD_DECODER(mpeg2, MPEG2VIDEO, NULL)
#endif

#if CONFIG_MPEG4_CRYSTALHD_DECODER
DEFINE_CRYSTALHD_DECODER(mpeg4, MPEG4, "mpeg4_unpack_bframes")
#endif

#if CONFIG_MSMPEG4_CRYSTALHD_DECODER
DEFINE_CRYSTALHD_DECODER(msmpeg4, MSMPEG4V3, NULL)
#endif

#if CONFIG_VC1_CRYSTALHD_DECODER
DEFINE_CRYSTALHD_DECODER(vc1, VC1, NULL)
#endif

#if CONFIG_WMV3_CRYSTALHD_DECODER
DEFINE_CRYSTALHD_DECODER(wmv3, WMV3, NULL)
#endif