/*
 * Copyright (c) 2010, Google, Inc.
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * VP8/VP9 decoder support via libvpx
 */

#include "config_components.h"

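/* Ask the libvpx headers not to expose their deprecated compatibility layer;
 * only the current decoder API is used below. */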
#define VPX_CODEC_DISABLE_COMPAT 1
#include <vpx/vpx_decoder.h>
#include <vpx/vpx_frame_buffer.h>
#include <vpx/vp8dx.h>

#include "libavutil/common.h"
#include "libavutil/cpu.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "avcodec.h"
#include "codec_internal.h"
#include "decode.h"
#include "internal.h"
#include "libvpx.h"
#include "profiles.h"

typedef struct VPxDecoderContext {
    struct vpx_codec_ctx decoder;
    struct vpx_codec_ctx decoder_alpha;
    AVBufferPool *pool;
    size_t pool_size;
    int has_alpha_channel;
} VPxContext;

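/* Callbacks for libvpx's external frame buffer interface (registered via
 * vpx_codec_set_frame_buffer_functions() in vpx_init() below): the decoder
 * writes directly into buffers drawn from an AVBufferPool, so decoded frames
 * can later be exported without an extra copy. */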
static int get_frame_buffer(void *priv, size_t min_size, vpx_codec_frame_buffer_t *fb)
{
    VPxContext *ctx = priv;
    AVBufferRef *buf;

    if (min_size > ctx->pool_size) {
        av_buffer_pool_uninit(&ctx->pool);
        /* According to the libvpx docs the buffer must be zeroed out. */
        ctx->pool = av_buffer_pool_init(min_size, av_buffer_allocz);
        if (!ctx->pool) {
            ctx->pool_size = 0;
            return AVERROR(ENOMEM);
        }
        ctx->pool_size = min_size;
    }

    buf = av_buffer_pool_get(ctx->pool);
    if (!buf)
        return AVERROR(ENOMEM);

    fb->priv = buf;
    fb->size = ctx->pool_size;
    fb->data = buf->data;

    return 0;
}

static int release_frame_buffer(void *priv, vpx_codec_frame_buffer_t *fb)
{
    AVBufferRef *buf = fb->priv;
    av_buffer_unref(&buf);
    return 0;
}

static av_cold int vpx_init(AVCodecContext *avctx,
                            struct vpx_codec_ctx* decoder,
                            const struct vpx_codec_iface *iface)
{
    struct vpx_codec_dec_cfg deccfg = {
        .threads = FFMIN(avctx->thread_count ? avctx->thread_count : av_cpu_count(), 16)
    };

    av_log(avctx, AV_LOG_INFO, "%s\n", vpx_codec_version_str());
    av_log(avctx, AV_LOG_VERBOSE, "%s\n", vpx_codec_build_config());

    if (vpx_codec_dec_init(decoder, iface, &deccfg, 0) != VPX_CODEC_OK) {
        const char *error = vpx_codec_error(decoder);
        av_log(avctx, AV_LOG_ERROR, "Failed to initialize decoder: %s\n",
               error);
        return AVERROR(EINVAL);
    }

    if (avctx->codec_id == AV_CODEC_ID_VP9)
        vpx_codec_set_frame_buffer_functions(decoder, get_frame_buffer, release_frame_buffer, avctx->priv_data);

    return 0;
}

// returns 0 on success, AVERROR_INVALIDDATA otherwise
static int set_pix_fmt(AVCodecContext *avctx, struct vpx_image *img,
                       int has_alpha_channel)
{
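    /* Map the vpx_color_space_t value reported by libvpx (img->cs) to the
     * corresponding AVColorSpace. */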
    static const enum AVColorSpace colorspaces[8] = {
        AVCOL_SPC_UNSPECIFIED, AVCOL_SPC_BT470BG, AVCOL_SPC_BT709, AVCOL_SPC_SMPTE170M,
        AVCOL_SPC_SMPTE240M, AVCOL_SPC_BT2020_NCL, AVCOL_SPC_RESERVED, AVCOL_SPC_RGB,
    };
#if VPX_IMAGE_ABI_VERSION >= 4
    static const enum AVColorRange color_ranges[] = {
        AVCOL_RANGE_MPEG, AVCOL_RANGE_JPEG
    };
    avctx->color_range = color_ranges[img->range];
#endif
    avctx->colorspace = colorspaces[img->cs];
    if (avctx->codec_id == AV_CODEC_ID_VP8 && img->fmt != VPX_IMG_FMT_I420)
        return AVERROR_INVALIDDATA;
    switch (img->fmt) {
    case VPX_IMG_FMT_I420:
        if (avctx->codec_id == AV_CODEC_ID_VP9)
            avctx->profile = FF_PROFILE_VP9_0;
        avctx->pix_fmt =
            has_alpha_channel ? AV_PIX_FMT_YUVA420P : AV_PIX_FMT_YUV420P;
        return 0;
#if CONFIG_LIBVPX_VP9_DECODER
    case VPX_IMG_FMT_I422:
        avctx->profile = FF_PROFILE_VP9_1;
        avctx->pix_fmt = AV_PIX_FMT_YUV422P;
        return 0;
    case VPX_IMG_FMT_I440:
        avctx->profile = FF_PROFILE_VP9_1;
        avctx->pix_fmt = AV_PIX_FMT_YUV440P;
        return 0;
    case VPX_IMG_FMT_I444:
        avctx->profile = FF_PROFILE_VP9_1;
        avctx->pix_fmt = avctx->colorspace == AVCOL_SPC_RGB ?
                         AV_PIX_FMT_GBRP : AV_PIX_FMT_YUV444P;
        return 0;
    case VPX_IMG_FMT_I42016:
        avctx->profile = FF_PROFILE_VP9_2;
        if (img->bit_depth == 10) {
            avctx->pix_fmt = AV_PIX_FMT_YUV420P10;
            return 0;
        } else if (img->bit_depth == 12) {
            avctx->pix_fmt = AV_PIX_FMT_YUV420P12;
            return 0;
        } else {
            return AVERROR_INVALIDDATA;
        }
    case VPX_IMG_FMT_I42216:
        avctx->profile = FF_PROFILE_VP9_3;
        if (img->bit_depth == 10) {
            avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
            return 0;
        } else if (img->bit_depth == 12) {
            avctx->pix_fmt = AV_PIX_FMT_YUV422P12;
            return 0;
        } else {
            return AVERROR_INVALIDDATA;
        }
    case VPX_IMG_FMT_I44016:
        avctx->profile = FF_PROFILE_VP9_3;
        if (img->bit_depth == 10) {
            avctx->pix_fmt = AV_PIX_FMT_YUV440P10;
            return 0;
        } else if (img->bit_depth == 12) {
            avctx->pix_fmt = AV_PIX_FMT_YUV440P12;
            return 0;
        } else {
            return AVERROR_INVALIDDATA;
        }
    case VPX_IMG_FMT_I44416:
        avctx->profile = FF_PROFILE_VP9_3;
        if (img->bit_depth == 10) {
            avctx->pix_fmt = avctx->colorspace == AVCOL_SPC_RGB ?
                             AV_PIX_FMT_GBRP10 : AV_PIX_FMT_YUV444P10;
            return 0;
        } else if (img->bit_depth == 12) {
            avctx->pix_fmt = avctx->colorspace == AVCOL_SPC_RGB ?
                             AV_PIX_FMT_GBRP12 : AV_PIX_FMT_YUV444P12;
            return 0;
        } else {
            return AVERROR_INVALIDDATA;
        }
#endif
    default:
        return AVERROR_INVALIDDATA;
    }
}

static int decode_frame(AVCodecContext *avctx, vpx_codec_ctx_t *decoder,
                        const uint8_t *data, uint32_t data_sz)
{
    if (vpx_codec_decode(decoder, data, data_sz, NULL, 0) != VPX_CODEC_OK) {
        const char *error  = vpx_codec_error(decoder);
        const char *detail = vpx_codec_error_detail(decoder);

        av_log(avctx, AV_LOG_ERROR, "Failed to decode frame: %s\n", error);
        if (detail) {
            av_log(avctx, AV_LOG_ERROR, "  Additional information: %s\n",
                   detail);
        }
        return AVERROR_INVALIDDATA;
    }
    return 0;
}

static int vpx_decode(AVCodecContext *avctx, AVFrame *picture,
                      int *got_frame, AVPacket *avpkt)
{
    VPxContext *ctx = avctx->priv_data;
    const void *iter = NULL;
    const void *iter_alpha = NULL;
    struct vpx_image *img, *img_alpha;
    int ret;
    uint8_t *side_data = NULL;
    size_t side_data_size;

    ret = decode_frame(avctx, &ctx->decoder, avpkt->data, avpkt->size);
    if (ret)
        return ret;

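    /* An alpha channel, when present, is coded as a second independent
     * VP8/VP9 stream carried in Matroska BlockAdditional side data with
     * BlockAddID 1; it is decoded with a separate libvpx context. */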
    side_data = av_packet_get_side_data(avpkt,
                                        AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL,
                                        &side_data_size);
    if (side_data_size >= 8) {
        const uint64_t additional_id = AV_RB64(side_data);
        side_data += 8;
        side_data_size -= 8;
        if (additional_id == 1) {  // 1 stands for alpha channel data.
            if (!ctx->has_alpha_channel) {
                ctx->has_alpha_channel = 1;
                ret = vpx_init(avctx,
                               &ctx->decoder_alpha,
#if CONFIG_LIBVPX_VP8_DECODER && CONFIG_LIBVPX_VP9_DECODER
                               (avctx->codec_id == AV_CODEC_ID_VP8) ?
                                   vpx_codec_vp8_dx() : vpx_codec_vp9_dx()
#elif CONFIG_LIBVPX_VP8_DECODER
                               vpx_codec_vp8_dx()
#else
                               vpx_codec_vp9_dx()
#endif
                               );
                if (ret)
                    return ret;
            }
            ret = decode_frame(avctx, &ctx->decoder_alpha, side_data,
                               side_data_size);
            if (ret)
                return ret;
        }
    }

    if ((img = vpx_codec_get_frame(&ctx->decoder, &iter)) &&
        (!ctx->has_alpha_channel ||
         (img_alpha = vpx_codec_get_frame(&ctx->decoder_alpha, &iter_alpha)))) {
        uint8_t *planes[4];
        int linesizes[4];

        if (img->d_w > img->w || img->d_h > img->h) {
            av_log(avctx, AV_LOG_ERROR, "Display dimensions %dx%d exceed storage %dx%d\n",
                   img->d_w, img->d_h, img->w, img->h);
            return AVERROR_EXTERNAL;
        }

        if ((ret = set_pix_fmt(avctx, img, ctx->has_alpha_channel)) < 0) {
            av_log(avctx, AV_LOG_ERROR, "Unsupported output colorspace (%d) / bit_depth (%d)\n",
                   img->fmt, img->bit_depth);
            return ret;
        }

        if ((int) img->d_w != avctx->width || (int) img->d_h != avctx->height) {
            av_log(avctx, AV_LOG_INFO, "dimension change! %dx%d -> %dx%d\n",
                   avctx->width, avctx->height, img->d_w, img->d_h);
            ret = ff_set_dimensions(avctx, img->d_w, img->d_h);
            if (ret < 0)
                return ret;
        }

        if (ctx->has_alpha_channel &&
            (img->d_w != img_alpha->d_w ||
             img->d_h != img_alpha->d_h ||
             img->bit_depth != img_alpha->bit_depth)) {
            av_log(avctx, AV_LOG_ERROR,
                   "Video dimensions %dx%d@%dbpc differ from alpha dimensions %dx%d@%dbpc\n",
                   img->d_w, img->d_h, img->bit_depth,
                   img_alpha->d_w, img_alpha->d_h, img_alpha->bit_depth);
            return AVERROR_INVALIDDATA;
        }

        planes[0] = img->planes[VPX_PLANE_Y];
        planes[1] = img->planes[VPX_PLANE_U];
        planes[2] = img->planes[VPX_PLANE_V];
        planes[3] =
            ctx->has_alpha_channel ? img_alpha->planes[VPX_PLANE_Y] : NULL;
        linesizes[0] = img->stride[VPX_PLANE_Y];
        linesizes[1] = img->stride[VPX_PLANE_U];
        linesizes[2] = img->stride[VPX_PLANE_V];
        linesizes[3] =
            ctx->has_alpha_channel ? img_alpha->stride[VPX_PLANE_Y] : 0;

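        /* If libvpx decoded into one of our pool-backed frame buffers,
         * export the planes zero-copy by referencing the underlying
         * AVBufferRef; otherwise allocate a frame and copy the image. */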
        if (img->fb_priv && (!ctx->has_alpha_channel || img_alpha->fb_priv)) {
            ret = ff_decode_frame_props(avctx, picture);
            if (ret < 0)
                return ret;
            picture->buf[0] = av_buffer_ref(img->fb_priv);
            if (!picture->buf[0])
                return AVERROR(ENOMEM);
            if (ctx->has_alpha_channel) {
                picture->buf[1] = av_buffer_ref(img_alpha->fb_priv);
                if (!picture->buf[1]) {
                    av_frame_unref(picture);
                    return AVERROR(ENOMEM);
                }
            }
            for (int i = 0; i < 4; i++) {
                picture->data[i] = planes[i];
                picture->linesize[i] = linesizes[i];
            }
        } else {
            if ((ret = ff_get_buffer(avctx, picture, 0)) < 0)
                return ret;
            av_image_copy(picture->data, picture->linesize, (const uint8_t**)planes,
                          linesizes, avctx->pix_fmt, img->d_w, img->d_h);
        }
        *got_frame = 1;
    }
    return avpkt->size;
}

static av_cold int vpx_free(AVCodecContext *avctx)
{
    VPxContext *ctx = avctx->priv_data;
    vpx_codec_destroy(&ctx->decoder);
    if (ctx->has_alpha_channel)
        vpx_codec_destroy(&ctx->decoder_alpha);
    av_buffer_pool_uninit(&ctx->pool);
    return 0;
}

#if CONFIG_LIBVPX_VP8_DECODER
static av_cold int vp8_init(AVCodecContext *avctx)
{
    VPxContext *ctx = avctx->priv_data;
    return vpx_init(avctx, &ctx->decoder, vpx_codec_vp8_dx());
}

const FFCodec ff_libvpx_vp8_decoder = {
    .p.name         = "libvpx",
    .p.long_name    = NULL_IF_CONFIG_SMALL("libvpx VP8"),
    .p.type         = AVMEDIA_TYPE_VIDEO,
    .p.id           = AV_CODEC_ID_VP8,
    .p.capabilities = AV_CODEC_CAP_OTHER_THREADS | AV_CODEC_CAP_DR1,
    .p.wrapper_name = "libvpx",
    .priv_data_size = sizeof(VPxContext),
    .init           = vp8_init,
    .close          = vpx_free,
    FF_CODEC_DECODE_CB(vpx_decode),
    .caps_internal  = FF_CODEC_CAP_AUTO_THREADS,
};
#endif /* CONFIG_LIBVPX_VP8_DECODER */

#if CONFIG_LIBVPX_VP9_DECODER
static av_cold int vp9_init(AVCodecContext *avctx)
{
    VPxContext *ctx = avctx->priv_data;
    return vpx_init(avctx, &ctx->decoder, vpx_codec_vp9_dx());
}

FFCodec ff_libvpx_vp9_decoder = {
    .p.name           = "libvpx-vp9",
    .p.long_name      = NULL_IF_CONFIG_SMALL("libvpx VP9"),
    .p.type           = AVMEDIA_TYPE_VIDEO,
    .p.id             = AV_CODEC_ID_VP9,
    .p.capabilities   = AV_CODEC_CAP_OTHER_THREADS,
    .p.profiles       = NULL_IF_CONFIG_SMALL(ff_vp9_profiles),
    .p.wrapper_name   = "libvpx",
    .priv_data_size   = sizeof(VPxContext),
    .init             = vp9_init,
    .close            = vpx_free,
    FF_CODEC_DECODE_CB(vpx_decode),
    .caps_internal    = FF_CODEC_CAP_AUTO_THREADS,
    .init_static_data = ff_vp9_init_static,
};
#endif /* CONFIG_LIBVPX_VP9_DECODER */