/*
 * Copyright (c) 2019 James Almer <jamrial@gmail.com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * This bitstream filter splits AV1 Temporal Units into packets containing
 * just one frame, plus any leading and trailing OBUs that may be present at
 * the beginning or end, respectively.
 *
 * Temporal Units already containing only one frame will be passed through
 * unchanged. When splitting can't be performed, the Temporal Unit will be
 * passed through containing only the remaining OBUs starting from the first
 * one after the last successfully split frame.
 */

#include "libavutil/avassert.h"

#include "bsf.h"
#include "bsf_internal.h"
#include "cbs.h"
#include "cbs_av1.h"

typedef struct AV1FSplitContext {
    AVPacket *buffer_pkt;
    CodedBitstreamContext *cbc;
    CodedBitstreamFragment temporal_unit;

    int nb_frames;
    int cur_frame;
    int cur_frame_idx;
    int last_frame_idx;
} AV1FSplitContext;

static int av1_frame_split_filter(AVBSFContext *ctx, AVPacket *out)
{
    AV1FSplitContext *s = ctx->priv_data;
    CodedBitstreamFragment *td = &s->temporal_unit;
    int i, ret;
    int split = !!s->buffer_pkt->data;

    if (!s->buffer_pkt->data) {
        int nb_frames = 0;

        ret = ff_bsf_get_packet_ref(ctx, s->buffer_pkt);
        if (ret < 0)
            return ret;

        ret = ff_cbs_read_packet(s->cbc, td, s->buffer_pkt);
        if (ret < 0) {
            av_log(ctx, AV_LOG_WARNING, "Failed to parse temporal unit.\n");
            goto passthrough;
        }

        for (i = 0; i < td->nb_units; i++) {
            CodedBitstreamUnit *unit = &td->units[i];

            if (unit->type == AV1_OBU_FRAME ||
                unit->type == AV1_OBU_FRAME_HEADER)
                nb_frames++;
            else if (unit->type == AV1_OBU_TILE_LIST) {
                av_log(ctx, AV_LOG_VERBOSE, "Large scale tiles are unsupported.\n");
                goto passthrough;
            }
        }
        if (nb_frames > 1) {
            s->cur_frame = 0;
            s->cur_frame_idx = s->last_frame_idx = 0;
            s->nb_frames = nb_frames;
            split = 1;
        }
    }

    if (split) {
        AV1RawFrameHeader *frame = NULL;
        int cur_frame_type = -1, size = 0;

        for (i = s->cur_frame_idx; i < td->nb_units; i++) {
            CodedBitstreamUnit *unit = &td->units[i];

            size += unit->data_size;
            if (unit->type == AV1_OBU_FRAME) {
                AV1RawOBU *obu = unit->content;

                if (frame) {
                    av_log(ctx, AV_LOG_WARNING, "Frame OBU found when Tile data for a "
                                                "previous frame was expected.\n");
                    goto passthrough;
                }

                frame = &obu->obu.frame.header;
                cur_frame_type = obu->header.obu_type;
                s->last_frame_idx = s->cur_frame_idx;
                s->cur_frame_idx = i + 1;
                s->cur_frame++;

                // split here unless it's the last frame, in which case
                // include every trailing OBU
                if (s->cur_frame < s->nb_frames)
                    break;
            } else if (unit->type == AV1_OBU_FRAME_HEADER) {
                AV1RawOBU *obu = unit->content;

                if (frame) {
                    av_log(ctx, AV_LOG_WARNING, "Frame Header OBU found when Tile data for a "
                                                "previous frame was expected.\n");
                    goto passthrough;
                }

                frame = &obu->obu.frame_header;
                cur_frame_type = obu->header.obu_type;
                s->last_frame_idx = s->cur_frame_idx;
                s->cur_frame++;

                // split here if show_existing_frame unless it's the last
                // frame, in which case include every trailing OBU
                if (frame->show_existing_frame &&
                    s->cur_frame < s->nb_frames) {
                    s->cur_frame_idx = i + 1;
                    break;
                }
            } else if (unit->type == AV1_OBU_TILE_GROUP) {
                AV1RawOBU *obu = unit->content;
                AV1RawTileGroup *group = &obu->obu.tile_group;

                if (!frame || cur_frame_type != AV1_OBU_FRAME_HEADER) {
                    av_log(ctx, AV_LOG_WARNING, "Unexpected Tile Group OBU found before a "
                                                "Frame Header.\n");
                    goto passthrough;
                }

                if ((group->tg_end == (frame->tile_cols * frame->tile_rows) - 1) &&
                    // include every trailing OBU with the last frame
                    s->cur_frame < s->nb_frames) {
                    s->cur_frame_idx = i + 1;
                    break;
                }
            }
        }
        av_assert0(frame && s->cur_frame <= s->nb_frames);

        ret = av_packet_ref(out, s->buffer_pkt);
        if (ret < 0)
            goto fail;

        out->data = (uint8_t *)td->units[s->last_frame_idx].data;
        out->size = size;

        // skip the frame in the buffer packet if it's split successfully, so it's not present
        // if the packet is passed through in case of failure when splitting another frame.
        s->buffer_pkt->data += size;
        s->buffer_pkt->size -= size;

        if (!frame->show_existing_frame && !frame->show_frame)
            out->pts = AV_NOPTS_VALUE;

        if (s->cur_frame == s->nb_frames) {
            av_packet_unref(s->buffer_pkt);
            ff_cbs_fragment_reset(td);
        }

        return 0;
    }

passthrough:
    av_packet_move_ref(out, s->buffer_pkt);

    ret = 0;
fail:
    if (ret < 0) {
        av_packet_unref(out);
        av_packet_unref(s->buffer_pkt);
    }
    ff_cbs_fragment_reset(td);

    return ret;
}

static const CodedBitstreamUnitType decompose_unit_types[] = {
    AV1_OBU_TEMPORAL_DELIMITER,
    AV1_OBU_SEQUENCE_HEADER,
    AV1_OBU_FRAME_HEADER,
    AV1_OBU_TILE_GROUP,
    AV1_OBU_FRAME,
};

static int av1_frame_split_init(AVBSFContext *ctx)
{
    AV1FSplitContext *s = ctx->priv_data;
    CodedBitstreamFragment *td = &s->temporal_unit;
    int ret;

    s->buffer_pkt = av_packet_alloc();
    if (!s->buffer_pkt)
        return AVERROR(ENOMEM);

    ret = ff_cbs_init(&s->cbc, AV_CODEC_ID_AV1, ctx);
    if (ret < 0)
        return ret;

    s->cbc->decompose_unit_types    = decompose_unit_types;
    s->cbc->nb_decompose_unit_types = FF_ARRAY_ELEMS(decompose_unit_types);

    if (!ctx->par_in->extradata_size)
        return 0;

    ret = ff_cbs_read_extradata(s->cbc, td, ctx->par_in);
    if (ret < 0)
        av_log(ctx, AV_LOG_WARNING, "Failed to parse extradata.\n");

    ff_cbs_fragment_reset(td);

    return 0;
}

static void av1_frame_split_flush(AVBSFContext *ctx)
{
    AV1FSplitContext *s = ctx->priv_data;

    av_packet_unref(s->buffer_pkt);
    ff_cbs_fragment_reset(&s->temporal_unit);
}

static void av1_frame_split_close(AVBSFContext *ctx)
{
    AV1FSplitContext *s = ctx->priv_data;

    av_packet_free(&s->buffer_pkt);
    ff_cbs_fragment_free(&s->temporal_unit);
    ff_cbs_close(&s->cbc);
}

static const enum AVCodecID av1_frame_split_codec_ids[] = {
    AV_CODEC_ID_AV1, AV_CODEC_ID_NONE,
};

const FFBitStreamFilter ff_av1_frame_split_bsf = {
    .p.name         = "av1_frame_split",
    .p.codec_ids    = av1_frame_split_codec_ids,
    .priv_data_size = sizeof(AV1FSplitContext),
    .init           = av1_frame_split_init,
    .flush          = av1_frame_split_flush,
    .close          = av1_frame_split_close,
    .filter         = av1_frame_split_filter,
};
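/*
 * For reference, a minimal sketch of how a caller might drive this filter
 * through the public AVBSF API in libavcodec/bsf.h. Error handling is
 * abbreviated, and split_temporal_unit(), "par" and "tu_pkt" (the stream
 * parameters and one packet holding a full Temporal Unit) are hypothetical
 * caller-side names, not part of this filter:
 *
 *     #include <libavcodec/bsf.h>
 *
 *     static int split_temporal_unit(const AVCodecParameters *par, AVPacket *tu_pkt)
 *     {
 *         const AVBitStreamFilter *f = av_bsf_get_by_name("av1_frame_split");
 *         AVBSFContext *bsf = NULL;
 *         AVPacket *frame_pkt;
 *         int ret;
 *
 *         if (!f)
 *             return AVERROR_BSF_NOT_FOUND;
 *
 *         frame_pkt = av_packet_alloc();
 *         if (!frame_pkt)
 *             return AVERROR(ENOMEM);
 *
 *         ret = av_bsf_alloc(f, &bsf);
 *         if (ret < 0)
 *             goto end;
 *         ret = avcodec_parameters_copy(bsf->par_in, par);
 *         if (ret < 0)
 *             goto end;
 *         ret = av_bsf_init(bsf);
 *         if (ret < 0)
 *             goto end;
 *
 *         // feed one Temporal Unit, then drain every per-frame packet it yields
 *         ret = av_bsf_send_packet(bsf, tu_pkt);
 *         while (ret >= 0 && (ret = av_bsf_receive_packet(bsf, frame_pkt)) == 0) {
 *             // ... consume frame_pkt here ...
 *             av_packet_unref(frame_pkt);
 *         }
 *         if (ret == AVERROR(EAGAIN))
 *             ret = 0; // the filter simply needs more input
 *
 *     end:
 *         av_bsf_free(&bsf);
 *         av_packet_free(&frame_pkt);
 *         return ret;
 *     }
 *
 * The filter can also be inserted from the ffmpeg command line while
 * stream-copying, e.g. "ffmpeg -i in.mkv -c:v copy -bsf:v av1_frame_split out.mkv".
 */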