/*
 * Copyright 2019 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "experimental/ffmpeg/SkVideoDecoder.h"
#include "include/core/SkColorSpace.h"
#include "include/core/SkImage.h"
#include "include/core/SkYUVAPixmaps.h"

static SkYUVColorSpace get_yuvspace(AVColorSpace space) {
    // this is pretty incomplete -- TODO: look to convert more AVColorSpaces
    switch (space) {
        case AVCOL_SPC_RGB:     return kIdentity_SkYUVColorSpace;
        case AVCOL_SPC_BT709:   return kRec709_SkYUVColorSpace;
        case AVCOL_SPC_SMPTE170M:
        case AVCOL_SPC_SMPTE240M:
        case AVCOL_SPC_BT470BG: return kRec601_SkYUVColorSpace;
        default: break;
    }
    return kRec709_SkYUVColorSpace;
}

struct av_transfer_characteristics {
    // Linear-to-encoded transfer:
    //   if x < beta:  delta * x
    //   else:         alpha * x^gamma - (alpha - 1)
    float alpha, beta, gamma, delta;
};

// Tables extracted from vf_colorspace.c

const av_transfer_characteristics gTransfer[AVCOL_TRC_NB] = {
    [AVCOL_TRC_BT709]        = { 1.099,  0.018,     0.45,      4.5   },
    [AVCOL_TRC_GAMMA22]      = { 1.0,    0.0,       1.0 / 2.2, 0.0   },
    [AVCOL_TRC_GAMMA28]      = { 1.0,    0.0,       1.0 / 2.8, 0.0   },
    [AVCOL_TRC_SMPTE170M]    = { 1.099,  0.018,     0.45,      4.5   },
    [AVCOL_TRC_SMPTE240M]    = { 1.1115, 0.0228,    0.45,      4.0   },
    [AVCOL_TRC_IEC61966_2_1] = { 1.055,  0.0031308, 1.0 / 2.4, 12.92 },
    [AVCOL_TRC_IEC61966_2_4] = { 1.099,  0.018,     0.45,      4.5   },
    [AVCOL_TRC_BT2020_10]    = { 1.099,  0.018,     0.45,      4.5   },
    [AVCOL_TRC_BT2020_12]    = { 1.0993, 0.0181,    0.45,      4.5   },
};

static skcms_TransferFunction compute_transfer(AVColorTransferCharacteristic t) {
    const av_transfer_characteristics* av = &gTransfer[AVCOL_TRC_BT709];
    if ((unsigned)t < AVCOL_TRC_NB) {
        av = &gTransfer[t];
    }
    if (av->alpha == 0) {
        av = &gTransfer[AVCOL_TRC_BT709];
    }

    skcms_TransferFunction linear_to_encoded = {
        av->gamma, sk_float_pow(av->alpha, 1/av->gamma), 0, av->delta, av->beta, 1 - av->alpha, 0,
    };
    skcms_TransferFunction encoded_to_linear;
    bool success = skcms_TransferFunction_invert(&linear_to_encoded, &encoded_to_linear);
    SkASSERT(success);

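    // SkColorSpace::MakeRGB() expects the encoded->linear direction, so we build the forward
    // (linear->encoded) curve from ffmpeg's constants above and return its inverse.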
    return encoded_to_linear;
}

enum Whitepoint {
    WP_D65,
    WP_C,
    WP_DCI,
    WP_E,
    WP_NB,
};

const SkPoint gWP[WP_NB] = {
    [WP_D65] = { 0.3127f, 0.3290f },
    [WP_C]   = { 0.3100f, 0.3160f },
    [WP_DCI] = { 0.3140f, 0.3510f },
    [WP_E]   = { 1/3.0f,  1/3.0f  },
};

#define ExpandWP(index) gWP[index].fX, gWP[index].fY

const SkColorSpacePrimaries gPrimaries[AVCOL_PRI_NB] = {
    [AVCOL_PRI_BT709]     = { 0.640f, 0.330f, 0.300f, 0.600f, 0.150f, 0.060f, ExpandWP(WP_D65) },
    [AVCOL_PRI_BT470M]    = { 0.670f, 0.330f, 0.210f, 0.710f, 0.140f, 0.080f, ExpandWP(WP_C)   },
    [AVCOL_PRI_BT470BG]   = { 0.640f, 0.330f, 0.290f, 0.600f, 0.150f, 0.060f, ExpandWP(WP_D65) },
    [AVCOL_PRI_SMPTE170M] = { 0.630f, 0.340f, 0.310f, 0.595f, 0.155f, 0.070f, ExpandWP(WP_D65) },
    [AVCOL_PRI_SMPTE240M] = { 0.630f, 0.340f, 0.310f, 0.595f, 0.155f, 0.070f, ExpandWP(WP_D65) },
    [AVCOL_PRI_SMPTE428]  = { 0.735f, 0.265f, 0.274f, 0.718f, 0.167f, 0.009f, ExpandWP(WP_E)   },
    [AVCOL_PRI_SMPTE431]  = { 0.680f, 0.320f, 0.265f, 0.690f, 0.150f, 0.060f, ExpandWP(WP_DCI) },
    [AVCOL_PRI_SMPTE432]  = { 0.680f, 0.320f, 0.265f, 0.690f, 0.150f, 0.060f, ExpandWP(WP_D65) },
    [AVCOL_PRI_FILM]      = { 0.681f, 0.319f, 0.243f, 0.692f, 0.145f, 0.049f, ExpandWP(WP_C)   },
    [AVCOL_PRI_BT2020]    = { 0.708f, 0.292f, 0.170f, 0.797f, 0.131f, 0.046f, ExpandWP(WP_D65) },
    [AVCOL_PRI_JEDEC_P22] = { 0.630f, 0.340f, 0.295f, 0.605f, 0.155f, 0.077f, ExpandWP(WP_D65) },
};

sk_sp<SkColorSpace> make_colorspace(AVColorPrimaries primaries,
                                    AVColorTransferCharacteristic transfer) {
    if (primaries == AVCOL_PRI_BT709 && transfer == AVCOL_TRC_BT709) {
        return SkColorSpace::MakeSRGB();
    }

    const SkColorSpacePrimaries* p = &gPrimaries[0];
    if ((unsigned)primaries < (unsigned)AVCOL_PRI_NB) {
        p = &gPrimaries[primaries];
    }

    skcms_Matrix3x3 matrix;
    p->toXYZD50(&matrix);
    return SkColorSpace::MakeRGB(compute_transfer(transfer), matrix);
}

// returns true on error (and may dump the particular error message)
static bool check_err(int err, const int silentList[] = nullptr) {
    if (err >= 0) {
        return false;
    }

    if (silentList) {
        for (; *silentList; ++silentList) {
            if (*silentList == err) {
                return true;    // we still report the error, but we don't printf
            }
        }
    }

    char errbuf[128];
    const char* errbuf_ptr = errbuf;

    if (av_strerror(err, errbuf, sizeof(errbuf)) < 0) {
        errbuf_ptr = strerror(AVUNERROR(err));
    }
    SkDebugf("%s\n", errbuf_ptr);
    return true;
}

static int skstream_read_packet(void* ctx, uint8_t* dstBuffer, int dstSize) {
    SkStream* stream = (SkStream*)ctx;
    int result = (int)stream->read(dstBuffer, dstSize);
    if (result == 0) {
        result = AVERROR_EOF;
    }
    return result;
}

static int64_t skstream_seek_packet(void* ctx, int64_t pos, int whence) {
    SkStream* stream = (SkStream*)ctx;
    switch (whence) {
        case SEEK_SET:
            break;
        case SEEK_CUR:
            pos = (int64_t)stream->getPosition() + pos;
            break;
        case SEEK_END:
            pos = (int64_t)stream->getLength() + pos;
            break;
        default:
            return -1;
    }
    return stream->seek(SkToSizeT(pos)) ? pos : -1;
}

static sk_sp<SkImage> make_yuv_420(GrRecordingContext* rContext,
                                   int w, int h,
                                   uint8_t* const data[],
                                   int const strides[],
                                   SkYUVColorSpace yuvSpace,
                                   sk_sp<SkColorSpace> cs) {
    SkYUVAInfo yuvaInfo({w, h},
                        SkYUVAInfo::PlaneConfig::kY_U_V,
                        SkYUVAInfo::Subsampling::k420,
                        yuvSpace);
    SkPixmap pixmaps[3];
    pixmaps[0].reset(SkImageInfo::MakeA8(w, h), data[0], strides[0]);
    w = (w + 1)/2;
    h = (h + 1)/2;
    pixmaps[1].reset(SkImageInfo::MakeA8(w, h), data[1], strides[1]);
    pixmaps[2].reset(SkImageInfo::MakeA8(w, h), data[2], strides[2]);
    auto yuvaPixmaps = SkYUVAPixmaps::FromExternalPixmaps(yuvaInfo, pixmaps);

    return SkImage::MakeFromYUVAPixmaps(
            rContext, yuvaPixmaps, GrMipMapped::kNo, false, std::move(cs));
}

// Init with illegal values, so our first compare will fail, forcing us to compute
// the skcolorspace.
SkVideoDecoder::ConvertedColorSpace::ConvertedColorSpace()
    : fPrimaries(AVCOL_PRI_NB), fTransfer(AVCOL_TRC_NB)
{}

void SkVideoDecoder::ConvertedColorSpace::update(AVColorPrimaries primaries,
                                                 AVColorTransferCharacteristic transfer) {
    if (fPrimaries != primaries || fTransfer != transfer) {
        fPrimaries = primaries;
        fTransfer = transfer;
        fCS = make_colorspace(primaries, transfer);
    }
}

double SkVideoDecoder::computeTimeStamp(const AVFrame* frame) const {
    AVRational base = fFormatCtx->streams[fStreamIndex]->time_base;
    return 1.0 * frame->pts * base.num / base.den;
}

sk_sp<SkImage> SkVideoDecoder::convertFrame(const AVFrame* frame) {
    auto yuv_space = get_yuvspace(frame->colorspace);

    // we have a 1-entry cache for converting colorspaces
    fCSCache.update(frame->color_primaries, frame->color_trc);

    // Are these always true? If so, we don't need to check our "cache" on each frame...
    SkASSERT(fDecoderCtx->colorspace == frame->colorspace);
    SkASSERT(fDecoderCtx->color_primaries == frame->color_primaries);
    SkASSERT(fDecoderCtx->color_trc == frame->color_trc);

    // Is this always true? If so, we might take advantage of it, knowing up-front if we support
    // the format for the whole stream, in which case we might have to ask ffmpeg to convert it
    // to something more reasonable (for us)...
    SkASSERT(fDecoderCtx->pix_fmt == frame->format);

    switch (frame->format) {
        case AV_PIX_FMT_YUV420P:
            if (auto image = make_yuv_420(fRecordingContext, frame->width, frame->height,
                                          frame->data, frame->linesize, yuv_space, fCSCache.fCS)) {
                return image;
            }
            break;
        default:
            break;
    }

    // General N32 fallback.
    const auto info = SkImageInfo::MakeN32(frame->width, frame->height,
                                           SkAlphaType::kOpaque_SkAlphaType);

    SkBitmap bm;
    bm.allocPixels(info, info.minRowBytes());

    constexpr auto fmt = SK_PMCOLOR_BYTE_ORDER(R,G,B,A) ? AV_PIX_FMT_RGBA : AV_PIX_FMT_BGRA;

    // TODO: should we cache these?
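    // libswscale converts from the frame's native pixel format to 8888 in Skia's N32 byte
    // order; we currently (re)build this context for every frame that takes the fallback path.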
    auto* ctx = sws_getContext(frame->width, frame->height, (AVPixelFormat)frame->format,
                               info.width(), info.height(), fmt,
                               SWS_BILINEAR, nullptr, nullptr, nullptr);

    uint8_t* dst[] = { (uint8_t*)bm.pixmap().writable_addr() };
    int dst_stride[] = { SkToInt(bm.pixmap().rowBytes()) };

    sws_scale(ctx, frame->data, frame->linesize, 0, frame->height, dst, dst_stride);

    sws_freeContext(ctx);

    bm.setImmutable();

    return SkImage::MakeFromBitmap(bm);
}

sk_sp<SkImage> SkVideoDecoder::nextImage(double* timeStamp) {
    double defaultTimeStampStorage = 0;
    if (!timeStamp) {
        timeStamp = &defaultTimeStampStorage;
    }

    if (fFormatCtx == nullptr) {
        return nullptr;
    }

    if (fMode == kProcessing_Mode) {
        // We sit in a loop, waiting for the codec to have received enough data (packets)
        // to have at least one frame available.
        // Treat non-zero return as EOF (or error, which we will decide is also EOF)
        while (!av_read_frame(fFormatCtx, &fPacket)) {
            if (fPacket.stream_index != fStreamIndex) {
                // got a packet for a stream other than our (video) stream, so continue
                continue;
            }

            int ret = avcodec_send_packet(fDecoderCtx, &fPacket);
            if (ret == AVERROR(EAGAIN)) {
                // may signal that we have plenty already, encouraging us to call receive_frame
                // so we don't treat this as an error.
                ret = 0;
            }
            (void)check_err(ret);   // we try to continue if there was an error

            int silentList[] = {
                -35,    // Resource temporarily unavailable (need more packets)
                0,
            };
            if (check_err(avcodec_receive_frame(fDecoderCtx, fFrame), silentList)) {
                // this may be just "needs more input", so we try to continue
            } else {
                *timeStamp = this->computeTimeStamp(fFrame);
                return this->convertFrame(fFrame);
            }
        }

        fMode = kDraining_Mode;
        (void)avcodec_send_packet(fDecoderCtx, nullptr);    // signal to start draining
    }
    if (fMode == kDraining_Mode) {
        if (avcodec_receive_frame(fDecoderCtx, fFrame) >= 0) {
            *timeStamp = this->computeTimeStamp(fFrame);
            return this->convertFrame(fFrame);
        }
        // else we decide we're done
        fMode = kDone_Mode;
    }
    return nullptr;
}

SkVideoDecoder::SkVideoDecoder(GrRecordingContext* rContext) : fRecordingContext(rContext) {}

SkVideoDecoder::~SkVideoDecoder() {
    this->reset();
}

void SkVideoDecoder::reset() {
    if (fFrame) {
        av_frame_free(&fFrame);
        fFrame = nullptr;
    }
    if (fDecoderCtx) {
        avcodec_free_context(&fDecoderCtx);
        fDecoderCtx = nullptr;
    }
    if (fFormatCtx) {
        avformat_close_input(&fFormatCtx);
        fFormatCtx = nullptr;
    }
    if (fStreamCtx) {
        av_freep(&fStreamCtx->buffer);
        avio_context_free(&fStreamCtx);
        fStreamCtx = nullptr;
    }

    fStream.reset(nullptr);
    fStreamIndex = -1;
    fMode = kDone_Mode;
}

bool SkVideoDecoder::loadStream(std::unique_ptr<SkStream> stream) {
    this->reset();
    if (!stream) {
        return false;
    }

    int bufferSize = 4 * 1024;
    uint8_t* buffer = (uint8_t*)av_malloc(bufferSize);
    if (!buffer) {
        return false;
    }

    fStream = std::move(stream);
    fStreamCtx = avio_alloc_context(buffer, bufferSize, 0, fStream.get(),
                                    skstream_read_packet, nullptr, skstream_seek_packet);
    if (!fStreamCtx) {
        av_freep(&buffer);
        this->reset();
        return false;
    }

    fFormatCtx = avformat_alloc_context();
    if (!fFormatCtx) {
        this->reset();
        return false;
    }
    fFormatCtx->pb = fStreamCtx;

    int err = avformat_open_input(&fFormatCtx, nullptr, nullptr, nullptr);
    if (err < 0) {
        SkDebugf("avformat_open_input failed %d\n", err);
        return false;
    }

    AVCodec* codec;
    fStreamIndex = av_find_best_stream(fFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0);
    if (fStreamIndex < 0) {
        SkDebugf("av_find_best_stream failed %d\n", fStreamIndex);
        this->reset();
        return false;
    }

    SkASSERT(codec);
    fDecoderCtx = avcodec_alloc_context3(codec);

    AVStream* strm = fFormatCtx->streams[fStreamIndex];
    if ((err = avcodec_parameters_to_context(fDecoderCtx, strm->codecpar)) < 0) {
        SkDebugf("avcodec_parameters_to_context failed %d\n", err);
        this->reset();
        return false;
    }

    if ((err = avcodec_open2(fDecoderCtx, codec, nullptr)) < 0) {
        SkDebugf("avcodec_open2 failed %d\n", err);
        this->reset();
        return false;
    }

    fFrame = av_frame_alloc();
    SkASSERT(fFrame);

    av_init_packet(&fPacket);   // is there a "free" call?
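    // Note: av_init_packet() only sets default fields and does not allocate; the matching
    // cleanup for packets filled by av_read_frame() would be av_packet_unref() (not called here).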

    fMode = kProcessing_Mode;

    return true;
}

SkISize SkVideoDecoder::dimensions() const {
    if (!fFormatCtx) {
        return {0, 0};
    }

    AVStream* strm = fFormatCtx->streams[fStreamIndex];
    return {strm->codecpar->width, strm->codecpar->height};
}

double SkVideoDecoder::duration() const {
    if (!fFormatCtx) {
        return 0;
    }

    AVStream* strm = fFormatCtx->streams[fStreamIndex];
    AVRational base = strm->time_base;
    return 1.0 * strm->duration * base.num / base.den;
}

bool SkVideoDecoder::rewind() {
    auto stream = std::move(fStream);
    this->reset();
    if (stream) {
        stream->rewind();
    }
    return this->loadStream(std::move(stream));
}
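
#if 0
// Illustrative usage sketch (not compiled): decode every frame of a video file using the
// raster fallback (no GPU context). SkStream::MakeFromFile() and the null GrRecordingContext
// are assumptions of this example, not requirements of the decoder API.
static void example_decode_all_frames(const char path[]) {
    SkVideoDecoder decoder(nullptr);
    if (!decoder.loadStream(SkStream::MakeFromFile(path))) {
        SkDebugf("could not load %s\n", path);
        return;
    }
    SkDebugf("%g seconds, %d x %d\n", decoder.duration(),
             decoder.dimensions().width(), decoder.dimensions().height());

    double timeStamp;
    while (sk_sp<SkImage> frame = decoder.nextImage(&timeStamp)) {
        // draw or inspect `frame` here; timeStamp is in seconds, derived from the
        // stream's time_base
    }
}
#endif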