/*
 * V4L2 buffer helper functions.
 *
 * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
 * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <fcntl.h>
#include <poll.h>
#include "libavcodec/avcodec.h"
#include "libavutil/pixdesc.h"
#include "v4l2_context.h"
#include "v4l2_buffers.h"
#include "v4l2_m2m.h"

#define USEC_PER_SEC 1000000
static AVRational v4l2_timebase = { 1, USEC_PER_SEC };

static inline V4L2m2mContext *buf_to_m2mctx(V4L2Buffer *buf)
{
    return V4L2_TYPE_IS_OUTPUT(buf->context->type) ?
        container_of(buf->context, V4L2m2mContext, output) :
        container_of(buf->context, V4L2m2mContext, capture);
}

static inline AVCodecContext *logger(V4L2Buffer *buf)
{
    return buf_to_m2mctx(buf)->avctx;
}

static inline AVRational v4l2_get_timebase(V4L2Buffer *avbuf)
{
    V4L2m2mContext *s = buf_to_m2mctx(avbuf);

    if (s->avctx->pkt_timebase.num)
        return s->avctx->pkt_timebase;
    return s->avctx->time_base;
}
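/*
 * V4L2 carries timestamps as a struct timeval in microseconds, so the two
 * helpers below rescale between the codec timebase (pkt_timebase when set,
 * time_base otherwise) and 1/USEC_PER_SEC. As an illustrative example, with
 * a pkt_timebase of 1/90000 a pts of 90000 becomes tv_sec = 1, tv_usec = 0,
 * and converts back to 90000 on the way out.
 */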
static inline void v4l2_set_pts(V4L2Buffer *out, int64_t pts)
{
    int64_t v4l2_pts;

    if (pts == AV_NOPTS_VALUE)
        pts = 0;

    /* convert pts to v4l2 timebase */
    v4l2_pts = av_rescale_q(pts, v4l2_get_timebase(out), v4l2_timebase);
    out->buf.timestamp.tv_usec = v4l2_pts % USEC_PER_SEC;
    out->buf.timestamp.tv_sec = v4l2_pts / USEC_PER_SEC;
}

static inline int64_t v4l2_get_pts(V4L2Buffer *avbuf)
{
    int64_t v4l2_pts;

    /* convert pts back to encoder timebase */
    v4l2_pts = (int64_t)avbuf->buf.timestamp.tv_sec * USEC_PER_SEC +
                        avbuf->buf.timestamp.tv_usec;

    return av_rescale_q(v4l2_pts, v4l2_timebase, v4l2_get_timebase(avbuf));
}

static enum AVColorPrimaries v4l2_get_color_primaries(V4L2Buffer *buf)
{
    enum v4l2_ycbcr_encoding ycbcr;
    enum v4l2_colorspace cs;

    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.colorspace :
        buf->context->format.fmt.pix.colorspace;

    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.ycbcr_enc :
        buf->context->format.fmt.pix.ycbcr_enc;

    switch (ycbcr) {
    case V4L2_YCBCR_ENC_XV709:
    case V4L2_YCBCR_ENC_709: return AVCOL_PRI_BT709;
    case V4L2_YCBCR_ENC_XV601:
    case V4L2_YCBCR_ENC_601: return AVCOL_PRI_BT470M;
    default:
        break;
    }

    switch (cs) {
    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_PRI_BT470BG;
    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_PRI_SMPTE170M;
    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_PRI_SMPTE240M;
    case V4L2_COLORSPACE_BT2020: return AVCOL_PRI_BT2020;
    default:
        break;
    }

    return AVCOL_PRI_UNSPECIFIED;
}

static enum AVColorRange v4l2_get_color_range(V4L2Buffer *buf)
{
    enum v4l2_quantization qt;

    qt = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.quantization :
        buf->context->format.fmt.pix.quantization;

    switch (qt) {
    case V4L2_QUANTIZATION_LIM_RANGE: return AVCOL_RANGE_MPEG;
    case V4L2_QUANTIZATION_FULL_RANGE: return AVCOL_RANGE_JPEG;
    default:
        break;
    }

    return AVCOL_RANGE_UNSPECIFIED;
}

static enum AVColorSpace v4l2_get_color_space(V4L2Buffer *buf)
{
    enum v4l2_ycbcr_encoding ycbcr;
    enum v4l2_colorspace cs;

    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.colorspace :
        buf->context->format.fmt.pix.colorspace;

    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.ycbcr_enc :
        buf->context->format.fmt.pix.ycbcr_enc;

    switch (cs) {
    case V4L2_COLORSPACE_SRGB: return AVCOL_SPC_RGB;
    case V4L2_COLORSPACE_REC709: return AVCOL_SPC_BT709;
    case V4L2_COLORSPACE_470_SYSTEM_M: return AVCOL_SPC_FCC;
    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_SPC_BT470BG;
    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_SPC_SMPTE170M;
    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_SPC_SMPTE240M;
    case V4L2_COLORSPACE_BT2020:
        if (ycbcr == V4L2_YCBCR_ENC_BT2020_CONST_LUM)
            return AVCOL_SPC_BT2020_CL;
        else
            return AVCOL_SPC_BT2020_NCL;
    default:
        break;
    }

    return AVCOL_SPC_UNSPECIFIED;
}
static enum AVColorTransferCharacteristic v4l2_get_color_trc(V4L2Buffer *buf)
{
    enum v4l2_ycbcr_encoding ycbcr;
    enum v4l2_xfer_func xfer;
    enum v4l2_colorspace cs;

    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.colorspace :
        buf->context->format.fmt.pix.colorspace;

    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.ycbcr_enc :
        buf->context->format.fmt.pix.ycbcr_enc;

    xfer = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.xfer_func :
        buf->context->format.fmt.pix.xfer_func;

    switch (xfer) {
    case V4L2_XFER_FUNC_709: return AVCOL_TRC_BT709;
    case V4L2_XFER_FUNC_SRGB: return AVCOL_TRC_IEC61966_2_1;
    default:
        break;
    }

    switch (cs) {
    case V4L2_COLORSPACE_470_SYSTEM_M: return AVCOL_TRC_GAMMA22;
    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_TRC_GAMMA28;
    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_TRC_SMPTE170M;
    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_TRC_SMPTE240M;
    default:
        break;
    }

    switch (ycbcr) {
    case V4L2_YCBCR_ENC_XV709:
    case V4L2_YCBCR_ENC_XV601: return AVCOL_TRC_BT1361_ECG;
    default:
        break;
    }

    return AVCOL_TRC_UNSPECIFIED;
}
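/*
 * Buffer ownership: when a dequeued buffer is handed to the user, its
 * mmapped planes are wrapped in AVBufferRefs whose free callback is
 * v4l2_free_buffer(). Once the last reference is released the buffer is
 * either re-queued to the driver or marked available again, and the
 * per-context refcount keeps the V4L2m2mContext alive in the meantime.
 */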
static void v4l2_free_buffer(void *opaque, uint8_t *unused)
{
    V4L2Buffer* avbuf = opaque;
    V4L2m2mContext *s = buf_to_m2mctx(avbuf);

    if (atomic_fetch_sub(&avbuf->context_refcount, 1) == 1) {
        atomic_fetch_sub_explicit(&s->refcount, 1, memory_order_acq_rel);

        if (s->reinit) {
            if (!atomic_load(&s->refcount))
                sem_post(&s->refsync);
        } else {
            if (s->draining && V4L2_TYPE_IS_OUTPUT(avbuf->context->type)) {
                /* no need to queue more buffers to the driver */
                avbuf->status = V4L2BUF_AVAILABLE;
            }
            else if (avbuf->context->streamon)
                ff_v4l2_buffer_enqueue(avbuf);
        }

        av_buffer_unref(&avbuf->context_ref);
    }
}

static int v4l2_buf_increase_ref(V4L2Buffer *in)
{
    V4L2m2mContext *s = buf_to_m2mctx(in);

    if (in->context_ref)
        atomic_fetch_add(&in->context_refcount, 1);
    else {
        in->context_ref = av_buffer_ref(s->self_ref);
        if (!in->context_ref)
            return AVERROR(ENOMEM);

        in->context_refcount = 1;
    }

    in->status = V4L2BUF_RET_USER;
    atomic_fetch_add_explicit(&s->refcount, 1, memory_order_relaxed);

    return 0;
}

static int v4l2_buf_to_bufref(V4L2Buffer *in, int plane, AVBufferRef **buf)
{
    int ret;

    if (plane >= in->num_planes)
        return AVERROR(EINVAL);

    /* even though most encoders return 0 in data_offset, encoding vp8 does require this value */
    *buf = av_buffer_create((char *)in->plane_info[plane].mm_addr + in->planes[plane].data_offset,
                            in->plane_info[plane].length, v4l2_free_buffer, in, 0);
    if (!*buf)
        return AVERROR(ENOMEM);

    ret = v4l2_buf_increase_ref(in);
    if (ret)
        av_buffer_unref(buf);

    return ret;
}

static int v4l2_bufref_to_buf(V4L2Buffer *out, int plane, const uint8_t* data, int size, int offset)
{
    unsigned int bytesused, length;

    if (plane >= out->num_planes)
        return AVERROR(EINVAL);

    length = out->plane_info[plane].length;
    bytesused = FFMIN(size+offset, length);

    memcpy((uint8_t*)out->plane_info[plane].mm_addr+offset, data, FFMIN(size, length-offset));

    if (V4L2_TYPE_IS_MULTIPLANAR(out->buf.type)) {
        out->planes[plane].bytesused = bytesused;
        out->planes[plane].length = length;
    } else {
        out->buf.bytesused = bytesused;
        out->buf.length = length;
    }

    return 0;
}
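/*
 * Export a dequeued capture buffer into an AVFrame without copying: each
 * V4L2 plane becomes one AVBufferRef. For semi-planar and planar formats
 * that the driver exposes through a single plane (NV12/NV21, YUV420P), the
 * extra data/linesize entries are derived from bytesperline and the frame
 * height, since all planes live contiguously in the same mapping.
 */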
static int v4l2_buffer_buf_to_swframe(AVFrame *frame, V4L2Buffer *avbuf)
{
    int i, ret;

    frame->format = avbuf->context->av_pix_fmt;

    for (i = 0; i < avbuf->num_planes; i++) {
        ret = v4l2_buf_to_bufref(avbuf, i, &frame->buf[i]);
        if (ret)
            return ret;

        frame->linesize[i] = avbuf->plane_info[i].bytesperline;
        frame->data[i] = frame->buf[i]->data;
    }

    /* fixup special cases */
    switch (avbuf->context->av_pix_fmt) {
    case AV_PIX_FMT_NV12:
    case AV_PIX_FMT_NV21:
        if (avbuf->num_planes > 1)
            break;
        frame->linesize[1] = avbuf->plane_info[0].bytesperline;
        frame->data[1] = frame->buf[0]->data + avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height;
        break;

    case AV_PIX_FMT_YUV420P:
        if (avbuf->num_planes > 1)
            break;
        frame->linesize[1] = avbuf->plane_info[0].bytesperline >> 1;
        frame->linesize[2] = avbuf->plane_info[0].bytesperline >> 1;
        frame->data[1] = frame->buf[0]->data + avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height;
        frame->data[2] = frame->data[1] + ((avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height) >> 2);
        break;

    default:
        break;
    }

    return 0;
}

static int v4l2_buffer_swframe_to_buf(const AVFrame *frame, V4L2Buffer *out)
{
    int i, ret;
    struct v4l2_format fmt = out->context->format;
    int pixel_format = V4L2_TYPE_IS_MULTIPLANAR(fmt.type) ?
                       fmt.fmt.pix_mp.pixelformat : fmt.fmt.pix.pixelformat;
    int height = V4L2_TYPE_IS_MULTIPLANAR(fmt.type) ?
                 fmt.fmt.pix_mp.height : fmt.fmt.pix.height;
    int is_planar_format = 0;

    switch (pixel_format) {
    case V4L2_PIX_FMT_YUV420M:
    case V4L2_PIX_FMT_YVU420M:
#ifdef V4L2_PIX_FMT_YUV422M
    case V4L2_PIX_FMT_YUV422M:
#endif
#ifdef V4L2_PIX_FMT_YVU422M
    case V4L2_PIX_FMT_YVU422M:
#endif
#ifdef V4L2_PIX_FMT_YUV444M
    case V4L2_PIX_FMT_YUV444M:
#endif
#ifdef V4L2_PIX_FMT_YVU444M
    case V4L2_PIX_FMT_YVU444M:
#endif
    case V4L2_PIX_FMT_NV12M:
    case V4L2_PIX_FMT_NV21M:
    case V4L2_PIX_FMT_NV12MT_16X16:
    case V4L2_PIX_FMT_NV12MT:
    case V4L2_PIX_FMT_NV16M:
    case V4L2_PIX_FMT_NV61M:
        is_planar_format = 1;
    }

    if (!is_planar_format) {
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
        int planes_nb = 0;
        int offset = 0;

        for (i = 0; i < desc->nb_components; i++)
            planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);

        for (i = 0; i < planes_nb; i++) {
            int size, h = height;
            if (i == 1 || i == 2) {
                h = AV_CEIL_RSHIFT(h, desc->log2_chroma_h);
            }
            size = frame->linesize[i] * h;
            ret = v4l2_bufref_to_buf(out, 0, frame->data[i], size, offset);
            if (ret)
                return ret;
            offset += size;
        }
        return 0;
    }

    for (i = 0; i < out->num_planes; i++) {
        ret = v4l2_bufref_to_buf(out, i, frame->buf[i]->data, frame->buf[i]->size, 0);
        if (ret)
            return ret;
    }

    return 0;
}

/******************************************************************************
 *
 *              V4L2Buffer interface
 *
 ******************************************************************************/

int ff_v4l2_buffer_avframe_to_buf(const AVFrame *frame, V4L2Buffer *out)
{
    v4l2_set_pts(out, frame->pts);

    return v4l2_buffer_swframe_to_buf(frame, out);
}

int ff_v4l2_buffer_buf_to_avframe(AVFrame *frame, V4L2Buffer *avbuf)
{
    int ret;

    av_frame_unref(frame);

    /* 1. get references to the actual data */
    ret = v4l2_buffer_buf_to_swframe(frame, avbuf);
    if (ret)
        return ret;

    /* 2. get frame information */
    frame->key_frame = !!(avbuf->buf.flags & V4L2_BUF_FLAG_KEYFRAME);
    frame->color_primaries = v4l2_get_color_primaries(avbuf);
    frame->colorspace = v4l2_get_color_space(avbuf);
    frame->color_range = v4l2_get_color_range(avbuf);
    frame->color_trc = v4l2_get_color_trc(avbuf);
    frame->pts = v4l2_get_pts(avbuf);
    frame->pkt_dts = AV_NOPTS_VALUE;

    /* these values are also updated during re-init in v4l2_process_driver_event */
    frame->height = avbuf->context->height;
    frame->width = avbuf->context->width;
    frame->sample_aspect_ratio = avbuf->context->sample_aspect_ratio;

    /* 3. report errors upstream */
    if (avbuf->buf.flags & V4L2_BUF_FLAG_ERROR) {
        av_log(logger(avbuf), AV_LOG_ERROR, "%s: driver decode error\n", avbuf->context->name);
        frame->decode_error_flags |= FF_DECODE_ERROR_INVALID_BITSTREAM;
    }

    return 0;
}

int ff_v4l2_buffer_buf_to_avpkt(AVPacket *pkt, V4L2Buffer *avbuf)
{
    int ret;

    av_packet_unref(pkt);
    ret = v4l2_buf_to_bufref(avbuf, 0, &pkt->buf);
    if (ret)
        return ret;

    pkt->size = V4L2_TYPE_IS_MULTIPLANAR(avbuf->buf.type) ? avbuf->buf.m.planes[0].bytesused : avbuf->buf.bytesused;
    pkt->data = pkt->buf->data;

    if (avbuf->buf.flags & V4L2_BUF_FLAG_KEYFRAME)
        pkt->flags |= AV_PKT_FLAG_KEY;

    if (avbuf->buf.flags & V4L2_BUF_FLAG_ERROR) {
        av_log(logger(avbuf), AV_LOG_ERROR, "%s driver encode error\n", avbuf->context->name);
        pkt->flags |= AV_PKT_FLAG_CORRUPT;
    }

    pkt->dts = pkt->pts = v4l2_get_pts(avbuf);

    return 0;
}

int ff_v4l2_buffer_avpkt_to_buf(const AVPacket *pkt, V4L2Buffer *out)
{
    int ret;

    ret = v4l2_bufref_to_buf(out, 0, pkt->data, pkt->size, 0);
    if (ret)
        return ret;

    v4l2_set_pts(out, pkt->pts);

    if (pkt->flags & AV_PKT_FLAG_KEY)
        out->flags = V4L2_BUF_FLAG_KEYFRAME;

    return 0;
}
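/*
 * ff_v4l2_buffer_initialize() queries the buffer with VIDIOC_QUERYBUF and
 * mmaps every exposed plane into plane_info[]. Capture buffers are queued
 * to the driver straight away; output buffers stay available until there
 * is data to send.
 */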
int ff_v4l2_buffer_initialize(V4L2Buffer* avbuf, int index)
{
    V4L2Context *ctx = avbuf->context;
    int ret, i;

    avbuf->buf.memory = V4L2_MEMORY_MMAP;
    avbuf->buf.type = ctx->type;
    avbuf->buf.index = index;

    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
        avbuf->buf.length = VIDEO_MAX_PLANES;
        avbuf->buf.m.planes = avbuf->planes;
    }

    ret = ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_QUERYBUF, &avbuf->buf);
    if (ret < 0)
        return AVERROR(errno);

    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
        avbuf->num_planes = 0;
        /* in MP, the V4L2 API states that buf.length means num_planes */
        for (i = 0; i < avbuf->buf.length; i++) {
            if (avbuf->buf.m.planes[i].length)
                avbuf->num_planes++;
        }
    } else
        avbuf->num_planes = 1;

    for (i = 0; i < avbuf->num_planes; i++) {

        avbuf->plane_info[i].bytesperline = V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ?
            ctx->format.fmt.pix_mp.plane_fmt[i].bytesperline :
            ctx->format.fmt.pix.bytesperline;

        if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
            avbuf->plane_info[i].length = avbuf->buf.m.planes[i].length;
            avbuf->plane_info[i].mm_addr = mmap(NULL, avbuf->buf.m.planes[i].length,
                                                PROT_READ | PROT_WRITE, MAP_SHARED,
                                                buf_to_m2mctx(avbuf)->fd, avbuf->buf.m.planes[i].m.mem_offset);
        } else {
            avbuf->plane_info[i].length = avbuf->buf.length;
            avbuf->plane_info[i].mm_addr = mmap(NULL, avbuf->buf.length,
                                                PROT_READ | PROT_WRITE, MAP_SHARED,
                                                buf_to_m2mctx(avbuf)->fd, avbuf->buf.m.offset);
        }

        if (avbuf->plane_info[i].mm_addr == MAP_FAILED)
            return AVERROR(ENOMEM);
    }

    avbuf->status = V4L2BUF_AVAILABLE;

    if (V4L2_TYPE_IS_OUTPUT(ctx->type))
        return 0;

    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
        avbuf->buf.m.planes = avbuf->planes;
        avbuf->buf.length = avbuf->num_planes;

    } else {
        avbuf->buf.bytesused = avbuf->planes[0].bytesused;
        avbuf->buf.length = avbuf->planes[0].length;
    }

    return ff_v4l2_buffer_enqueue(avbuf);
}

int ff_v4l2_buffer_enqueue(V4L2Buffer* avbuf)
{
    int ret;

    avbuf->buf.flags = avbuf->flags;

    ret = ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_QBUF, &avbuf->buf);
    if (ret < 0)
        return AVERROR(errno);

    avbuf->status = V4L2BUF_IN_DRIVER;

    return 0;
}