/*
 * Copyright (C) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "codec_utils.h"
#include "avcodec_log.h"
#include "media_description.h"
namespace OHOS {
namespace MediaAVCodec {
namespace Codec {
namespace {
constexpr OHOS::HiviewDFX::HiLogLabel LABEL = {LOG_CORE, LOG_DOMAIN_FRAMEWORK, "FCodec"};
constexpr uint32_t INDEX_ARRAY = 2;
std::map<VideoPixelFormat, AVPixelFormat> g_pixelFormatMap = {
    {VideoPixelFormat::YUVI420, AV_PIX_FMT_YUV420P},
    {VideoPixelFormat::NV12, AV_PIX_FMT_NV12},
    {VideoPixelFormat::NV21, AV_PIX_FMT_NV21},
    {VideoPixelFormat::RGBA, AV_PIX_FMT_RGBA},
};
} // namespace

using namespace OHOS::Media;
int32_t ConvertVideoFrame(std::shared_ptr<Scale> *scale, std::shared_ptr<AVFrame> frame, uint8_t **dstData,
                          int32_t *dstLineSize, AVPixelFormat dstPixFmt)
{
    if (*scale == nullptr) {
        *scale = std::make_shared<Scale>();
        ScalePara scalePara{static_cast<int32_t>(frame->width), static_cast<int32_t>(frame->height),
                            static_cast<AVPixelFormat>(frame->format), static_cast<int32_t>(frame->width),
                            static_cast<int32_t>(frame->height), dstPixFmt};
        CHECK_AND_RETURN_RET_LOG((*scale)->Init(scalePara, dstData, dstLineSize) == AVCS_ERR_OK, AVCS_ERR_UNKNOWN,
                                 "Scale init error");
    }
    return (*scale)->Convert(frame->data, frame->linesize, dstData, dstLineSize);
}

int32_t WriteYuvDataStride(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
                           int32_t stride, const Format &format)
{
    int32_t height;
    int32_t fmt;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt);
    VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);
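    // The source linesize may be wider than the destination stride, so each plane is
    // copied row by row with the copy length clamped to the smaller of the two.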
    CHECK_AND_RETURN_RET_LOG(pixFmt == VideoPixelFormat::YUVI420 || pixFmt == VideoPixelFormat::NV12 ||
                                 pixFmt == VideoPixelFormat::NV21,
                             AVCS_ERR_UNSUPPORT, "pixFmt: %{public}d is not supported", pixFmt);
    int32_t srcPos = 0;
    int32_t dstPos = 0;
    int32_t dataSize = scaleLineSize[0];
    int32_t writeSize = dataSize > stride ? stride : dataSize;
    for (int32_t colNum = 0; colNum < height; colNum++) {
        memory->Write(scaleData[0] + srcPos, writeSize, dstPos);
        dstPos += stride;
        srcPos += dataSize;
    }
    srcPos = 0;
    if (pixFmt == VideoPixelFormat::YUVI420) {
        dataSize = scaleLineSize[1];
        writeSize = dataSize > (stride / UV_SCALE_FACTOR) ? (stride / UV_SCALE_FACTOR) : dataSize;
        for (int32_t colNum = 0; colNum < (height / UV_SCALE_FACTOR); colNum++) {
            memory->Write(scaleData[1] + srcPos, writeSize, dstPos);
            dstPos += (stride / UV_SCALE_FACTOR);
            srcPos += dataSize;
        }
        srcPos = 0;
        for (int32_t colNum = 0; colNum < (height / UV_SCALE_FACTOR); colNum++) {
            memory->Write(scaleData[INDEX_ARRAY] + srcPos, writeSize, dstPos);
            dstPos += (stride / UV_SCALE_FACTOR);
            srcPos += dataSize;
        }
    } else if ((pixFmt == VideoPixelFormat::NV12) || (pixFmt == VideoPixelFormat::NV21)) {
        dataSize = scaleLineSize[1];
        writeSize = dataSize > stride ? stride : dataSize;
        for (int32_t colNum = 0; colNum < (height / UV_SCALE_FACTOR); colNum++) {
            memory->Write(scaleData[1] + srcPos, writeSize, dstPos);
            dstPos += stride;
            srcPos += dataSize;
        }
    }
    AVCODEC_LOGD("WriteYuvDataStride success");
    return AVCS_ERR_OK;
}

int32_t WriteRgbDataStride(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
                           int32_t stride, const Format &format)
{
    int32_t height;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    int32_t srcPos = 0;
    int32_t dstPos = 0;
    int32_t dataSize = scaleLineSize[0];
    int32_t writeSize = dataSize > stride ? stride : dataSize;
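    // Copy the packed RGBA rows one at a time; writeSize clamps each row to the destination stride.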
    for (int32_t colNum = 0; colNum < height; colNum++) {
        memory->Write(scaleData[0] + srcPos, writeSize, dstPos);
        dstPos += stride;
        srcPos += dataSize;
    }

    AVCODEC_LOGD("WriteRgbDataStride success");
    return AVCS_ERR_OK;
}

int32_t WriteYuvData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
                     int32_t &height, VideoPixelFormat &pixFmt)
{
    int32_t ySize = static_cast<int32_t>(scaleLineSize[0] * height); // Y plane size
    int32_t uvSize = static_cast<int32_t>(scaleLineSize[1] * height / 2); // half-height chroma: one plane for I420, interleaved UV for NV12/NV21
    int32_t frameSize = 0;
    if (pixFmt == VideoPixelFormat::YUVI420) {
        frameSize = ySize + (uvSize * 2); // I420 carries two separate chroma planes
    } else if (pixFmt == VideoPixelFormat::NV21 || pixFmt == VideoPixelFormat::NV12) {
        frameSize = ySize + uvSize;
    }
    CHECK_AND_RETURN_RET_LOG(memory->GetCapacity() >= frameSize, AVCS_ERR_NO_MEMORY,
                             "output buffer size is not enough: real[%{public}d], need[%{public}u]",
                             memory->GetCapacity(), frameSize);
    if (pixFmt == VideoPixelFormat::YUVI420) {
        memory->Write(scaleData[0], ySize);
        memory->Write(scaleData[1], uvSize);
        memory->Write(scaleData[2], uvSize); // second chroma plane
    } else if ((pixFmt == VideoPixelFormat::NV12) || (pixFmt == VideoPixelFormat::NV21)) {
        memory->Write(scaleData[0], ySize);
        memory->Write(scaleData[1], uvSize);
    } else {
        return AVCS_ERR_UNSUPPORT;
    }
    return AVCS_ERR_OK;
}

int32_t WriteRgbData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, const int32_t *scaleLineSize,
                     int32_t &height)
{
    int32_t frameSize = static_cast<int32_t>(scaleLineSize[0] * height);
    CHECK_AND_RETURN_RET_LOG(memory->GetCapacity() >= frameSize, AVCS_ERR_NO_MEMORY,
                             "output buffer size is not enough: real[%{public}d], need[%{public}u]",
                             memory->GetCapacity(), frameSize);
    memory->Write(scaleData[0], frameSize);
    return AVCS_ERR_OK;
}

int32_t WriteSurfaceData(const std::shared_ptr<AVMemory> &memory, struct SurfaceInfo &surfaceInfo, const Format &format)
{
    int32_t height;
    int32_t fmt;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt);
    VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);
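    // Wait on the surface fence before writing, then fall back to the row-by-row stride
    // copy whenever the surface stride is not a whole multiple of the scaled Y linesize.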
    if (surfaceInfo.surfaceFence != nullptr) {
        surfaceInfo.surfaceFence->Wait(100); // wait at most 100ms
    }
    uint32_t yScaleLineSize = static_cast<uint32_t>(surfaceInfo.scaleLineSize[0]);
    if (IsYuvFormat(pixFmt)) {
        if (surfaceInfo.surfaceStride % yScaleLineSize) {
            return WriteYuvDataStride(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize,
                                      surfaceInfo.surfaceStride, format);
        }
        WriteYuvData(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize, height, pixFmt);
    } else if (IsRgbFormat(pixFmt)) {
        if (surfaceInfo.surfaceStride % yScaleLineSize) {
            return WriteRgbDataStride(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize,
                                      surfaceInfo.surfaceStride, format);
        }
        WriteRgbData(memory, surfaceInfo.scaleData, surfaceInfo.scaleLineSize, height);
    } else {
        AVCODEC_LOGE("Fill frame buffer failed: unsupported pixel format: %{public}d", pixFmt);
        return AVCS_ERR_UNSUPPORT;
    }
    return AVCS_ERR_OK;
}

int32_t WriteBufferData(const std::shared_ptr<AVMemory> &memory, uint8_t **scaleData, int32_t *scaleLineSize,
                        const Format &format)
{
    int32_t height;
    int32_t width;
    int32_t fmt;
    format.GetIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, height);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_WIDTH, width);
    format.GetIntValue(MediaDescriptionKey::MD_KEY_PIXEL_FORMAT, fmt);
    VideoPixelFormat pixFmt = static_cast<VideoPixelFormat>(fmt);

    if (IsYuvFormat(pixFmt)) {
        if (scaleLineSize[0] % width) {
            return WriteYuvDataStride(memory, scaleData, scaleLineSize, width, format);
        }
        WriteYuvData(memory, scaleData, scaleLineSize, height, pixFmt);
    } else if (IsRgbFormat(pixFmt)) {
        if (scaleLineSize[0] % width) {
            return WriteRgbDataStride(memory, scaleData, scaleLineSize, width * VIDEO_PIX_DEPTH_RGBA, format);
        }
        WriteRgbData(memory, scaleData, scaleLineSize, height);
    } else {
        AVCODEC_LOGE("Fill frame buffer failed: unsupported pixel format: %{public}d", pixFmt);
        return AVCS_ERR_UNSUPPORT;
    }
    return AVCS_ERR_OK;
}

std::string AVStrError(int errnum)
{
    char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
    av_strerror(errnum, errbuf, AV_ERROR_MAX_STRING_SIZE);
    return std::string(errbuf);
}

GraphicTransformType TranslateSurfaceRotation(const VideoRotation &rotation)
{
    switch (rotation) {
        case VideoRotation::VIDEO_ROTATION_90: {
            return GRAPHIC_ROTATE_270;
        }
        case VideoRotation::VIDEO_ROTATION_180: {
            return GRAPHIC_ROTATE_180;
        }
        case VideoRotation::VIDEO_ROTATION_270: {
            return GRAPHIC_ROTATE_90;
        }
        default:
            return GRAPHIC_ROTATE_NONE;
    }
}

GraphicPixelFormat TranslateSurfaceFormat(const VideoPixelFormat &surfaceFormat)
{
    switch (surfaceFormat) {
        case VideoPixelFormat::YUVI420: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_P;
        }
        case VideoPixelFormat::RGBA: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_RGBA_8888;
        }
        case VideoPixelFormat::NV12: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_SP;
        }
        case VideoPixelFormat::NV21: {
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCRCB_420_SP;
        }
        default:
            return GraphicPixelFormat::GRAPHIC_PIXEL_FMT_BUTT;
    }
}

VideoPixelFormat ConvertPixelFormatFromFFmpeg(int32_t ffmpegPixelFormat)
{
    auto iter = std::find_if(
        g_pixelFormatMap.begin(), g_pixelFormatMap.end(),
        [&](const std::pair<VideoPixelFormat, AVPixelFormat> &tmp) -> bool { return tmp.second == ffmpegPixelFormat; });
    return iter == g_pixelFormatMap.end() ? VideoPixelFormat::UNKNOWN : iter->first;
}

AVPixelFormat ConvertPixelFormatToFFmpeg(VideoPixelFormat pixelFormat)
{
    auto iter = std::find_if(
        g_pixelFormatMap.begin(), g_pixelFormatMap.end(),
        [&](const std::pair<VideoPixelFormat, AVPixelFormat> &tmp) -> bool { return tmp.first == pixelFormat; });
    return iter == g_pixelFormatMap.end() ? AV_PIX_FMT_NONE : iter->second;
}

bool IsYuvFormat(VideoPixelFormat &format)
{
    return (format == VideoPixelFormat::YUVI420 || format == VideoPixelFormat::NV12 ||
            format == VideoPixelFormat::NV21);
}

bool IsRgbFormat(VideoPixelFormat &format)
{
    return (format == VideoPixelFormat::RGBA);
}

int32_t Scale::Init(const ScalePara &scalePara, uint8_t **dstData, int32_t *dstLineSize)
{
    scalePara_ = scalePara;
    if (swsCtx_ != nullptr) {
        return AVCS_ERR_OK;
    }
    auto swsContext =
        sws_getContext(scalePara_.srcWidth, scalePara_.srcHeight, scalePara_.srcFfFmt, scalePara_.dstWidth,
                       scalePara_.dstHeight, scalePara_.dstFfFmt, SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
    if (swsContext == nullptr) {
        return AVCS_ERR_UNKNOWN;
    }
    swsCtx_ = std::shared_ptr<SwsContext>(swsContext, [](struct SwsContext *ptr) {
        if (ptr != nullptr) {
            sws_freeContext(ptr);
        }
    });
    auto ret = av_image_alloc(dstData, dstLineSize, scalePara_.dstWidth, scalePara_.dstHeight, scalePara_.dstFfFmt,
                              scalePara_.align);
    if (ret < 0) {
        return AVCS_ERR_UNKNOWN;
    }
    for (int32_t i = 0; dstLineSize[i] > 0; i++) {
        if (dstData[i] && !dstLineSize[i]) {
            return AVCS_ERR_UNKNOWN;
        }
    }
    return AVCS_ERR_OK;
}

int32_t Scale::Convert(uint8_t **srcData, const int32_t *srcLineSize, uint8_t **dstData, int32_t *dstLineSize)
{
    auto res = sws_scale(swsCtx_.get(), srcData, srcLineSize, 0, scalePara_.srcHeight, dstData, dstLineSize);
    if (res < 0) {
        return AVCS_ERR_UNKNOWN;
    }
    return AVCS_ERR_OK;
}
} // namespace Codec
} // namespace MediaAVCodec
} // namespace OHOS