Lines Matching refs:ctx (FFmpeg libavcodec/nvenc.c)
147 NvencContext *ctx = avctx->priv_data;
148 NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &ctx->nvenc_dload_funcs.nvenc_funcs;
150 if (p_nvenc && ctx->nvencoder)
151 details = p_nvenc->nvEncGetLastErrorString(ctx->nvencoder);
169 static void nvenc_map_preset(NvencContext *ctx)
209 GUIDTuple *t = &presets[ctx->preset];
211 ctx->init_encode_params.presetGUID = t->guid;
212 ctx->flags = t->flags;
215 if (ctx->tuning_info == NV_ENC_TUNING_INFO_LOSSLESS)
216 ctx->flags |= NVENC_LOSSLESS;
281 NvencContext *ctx = avctx->priv_data;
282 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
325 NvencContext *ctx = avctx->priv_data;
326 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
328 if (ctx->d3d11_device)
331 return CHECK_CU(dl_fn->cuda_dl->cuCtxPushCurrent(ctx->cu_context));
336 NvencContext *ctx = avctx->priv_data;
337 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
340 if (ctx->d3d11_device)
349 NvencContext *ctx = avctx->priv_data;
350 NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &ctx->nvenc_dload_funcs.nvenc_funcs;
355 if (ctx->d3d11_device) {
356 params.device = ctx->d3d11_device;
359 params.device = ctx->cu_context;
363 ret = p_nvenc->nvEncOpenEncodeSessionEx(&params, &ctx->nvencoder);
365 ctx->nvencoder = NULL;
374 NvencContext *ctx = avctx->priv_data;
375 NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &ctx->nvenc_dload_funcs.nvenc_funcs;
379 ret = p_nvenc->nvEncGetEncodeGUIDCount(ctx->nvencoder, &count);
388 ret = p_nvenc->nvEncGetEncodeGUIDs(ctx->nvencoder, guids, count, &count);
396 if (!memcmp(&guids[i], &ctx->init_encode_params.encodeGUID, sizeof(*guids))) {
410 NvencContext *ctx = avctx->priv_data;
411 NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &ctx->nvenc_dload_funcs.nvenc_funcs;
418 ret = p_nvenc->nvEncGetEncodeCaps(ctx->nvencoder, ctx->init_encode_params.encodeGUID, &params, &val);
427 NvencContext *ctx = avctx->priv_data;
437 if (IS_YUV444(ctx->data_pix_fmt) && ret <= 0) {
443 if (ctx->flags & NVENC_LOSSLESS && ret <= 0) {
479 if (IS_10BIT(ctx->data_pix_fmt) && ret <= 0) {
485 if (ctx->rc_lookahead > 0 && ret <= 0) {
491 if (ctx->temporal_aq > 0 && ret <= 0) {
497 if (ctx->weighted_pred > 0 && ret <= 0) {
503 if (ctx->coder == NV_ENC_H264_ENTROPY_CODING_MODE_CABAC && ret <= 0) {
510 if (ctx->b_ref_mode == NV_ENC_BFRAME_REF_MODE_EACH && ret != 1 && ret != 3) {
513 } else if (ctx->b_ref_mode != NV_ENC_BFRAME_REF_MODE_DISABLED && ret == 0) {
518 if (ctx->b_ref_mode != 0) {
539 if(ctx->single_slice_intra_refresh && ret <= 0) {
544 if(ctx->single_slice_intra_refresh) {
551 if((ctx->intra_refresh || ctx->single_slice_intra_refresh) && ret <= 0) {
557 if (ctx->constrained_encoding && avctx->codec->id == AV_CODEC_ID_HEVC) {
564 if(ctx->constrained_encoding && ret <= 0) {
569 ctx->support_dyn_bitrate = nvenc_check_cap(avctx, NV_ENC_CAPS_SUPPORT_DYN_BITRATE_CHANGE);
576 NvencContext *ctx = avctx->priv_data;
577 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
584 if (ctx->device == LIST_DEVICES)
605 if (ctx->device != idx && ctx->device != ANY_DEVICE)
608 ret = CHECK_CU(dl_fn->cuda_dl->cuCtxCreate(&ctx->cu_context_internal, 0, cu_device));
612 ctx->cu_context = ctx->cu_context_internal;
613 ctx->cu_stream = NULL;
628 if (ctx->device == idx || ctx->device == ANY_DEVICE)
635 p_nvenc->nvEncDestroyEncoder(ctx->nvencoder);
636 ctx->nvencoder = NULL;
642 CHECK_CU(dl_fn->cuda_dl->cuCtxDestroy(ctx->cu_context_internal));
643 ctx->cu_context_internal = NULL;
651 NvencContext *ctx = avctx->priv_data;
652 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
656 ctx->init_encode_params.encodeGUID = NV_ENC_CODEC_H264_GUID;
659 ctx->init_encode_params.encodeGUID = NV_ENC_CODEC_HEVC_GUID;
665 nvenc_map_preset(ctx);
667 if (ctx->flags & NVENC_DEPRECATED_PRESET)
704 ctx->cu_context = cuda_device_hwctx->cuda_ctx;
705 ctx->cu_stream = cuda_device_hwctx->stream;
709 ctx->d3d11_device = d3d11_device_hwctx->device;
710 ID3D11Device_AddRef(ctx->d3d11_device);
741 if ((nvenc_check_device(avctx, i)) >= 0 && ctx->device != LIST_DEVICES)
745 if (ctx->device == LIST_DEVICES)
753 av_log(avctx, AV_LOG_FATAL, "Requested GPU %d, but only %d GPUs are available!\n", ctx->device, nb_devices);
762 NvencContext *ctx = avctx->priv_data;
763 NV_ENC_RC_PARAMS *rc = &ctx->encode_config.rcParams;
767 if (ctx->init_qp_p >= 0) {
768 rc->constQP.qpInterP = ctx->init_qp_p;
769 if (ctx->init_qp_i >= 0 && ctx->init_qp_b >= 0) {
770 rc->constQP.qpIntra = ctx->init_qp_i;
771 rc->constQP.qpInterB = ctx->init_qp_b;
781 } else if (ctx->cqp >= 0) {
782 rc->constQP.qpInterP = rc->constQP.qpInterB = rc->constQP.qpIntra = ctx->cqp;
784 rc->constQP.qpInterB = av_clip(ctx->cqp * fabs(avctx->b_quant_factor) + avctx->b_quant_offset + 0.5, 0, 51);
786 rc->constQP.qpIntra = av_clip(ctx->cqp * fabs(avctx->i_quant_factor) + avctx->i_quant_offset + 0.5, 0, 51);
795 NvencContext *ctx = avctx->priv_data;
796 NV_ENC_RC_PARAMS *rc = &ctx->encode_config.rcParams;
826 if (ctx->init_qp_p < 0) {
829 rc->initialRCQP.qpInterP = ctx->init_qp_p;
832 if (ctx->init_qp_i < 0) {
840 rc->initialRCQP.qpIntra = ctx->init_qp_i;
843 if (ctx->init_qp_b < 0) {
851 rc->initialRCQP.qpInterB = ctx->init_qp_b;
857 NvencContext *ctx = avctx->priv_data;
858 NV_ENC_RC_PARAMS *rc = &ctx->encode_config.rcParams;
871 NvencContext *ctx = avctx->priv_data;
872 NV_ENC_RC_PARAMS *rc = &ctx->encode_config.rcParams;
874 switch (ctx->rc) {
897 rc->rateControlMode = ctx->rc;
902 NvencContext *ctx = avctx->priv_data;
906 int nb_surfaces = FFMAX(4, ctx->encode_config.frameIntervalP * 2 * 2);
909 if (ctx->rc_lookahead > 0) {
912 nb_surfaces = FFMAX(1, FFMAX(nb_surfaces, ctx->rc_lookahead + ctx->encode_config.frameIntervalP + 1 + 4));
913 if (nb_surfaces > ctx->nb_surfaces && ctx->nb_surfaces > 0)
917 "increasing used surfaces %d -> %d\n", ctx->nb_surfaces, nb_surfaces);
919 ctx->nb_surfaces = FFMAX(nb_surfaces, ctx->nb_surfaces);
921 if (ctx->encode_config.frameIntervalP > 1 && ctx->nb_surfaces < nb_surfaces && ctx->nb_surfaces > 0)
925 "increasing used surfaces %d -> %d\n", ctx->nb_surfaces, nb_surfaces);
926 ctx->nb_surfaces = FFMAX(ctx->nb_surfaces, nb_surfaces);
928 else if (ctx->nb_surfaces <= 0)
929 ctx->nb_surfaces = nb_surfaces;
933 ctx->nb_surfaces = FFMAX(1, FFMIN(MAX_REGISTERED_FRAMES, ctx->nb_surfaces));
934 ctx->async_depth = FFMIN(ctx->async_depth, ctx->nb_surfaces - 1);
941 NvencContext *ctx = avctx->priv_data;
946 if (ctx->cqp < 0 && avctx->global_quality > 0)
947 ctx->cqp = avctx->global_quality;
950 ctx->encode_config.rcParams.averageBitRate = avctx->bit_rate;
951 } else if (ctx->encode_config.rcParams.averageBitRate > 0) {
952 ctx->encode_config.rcParams.maxBitRate = ctx->encode_config.rcParams.averageBitRate;
956 ctx->encode_config.rcParams.maxBitRate = avctx->rc_max_rate;
959 ctx->encode_config.rcParams.multiPass = ctx->multipass;
961 if (ctx->flags & NVENC_ONE_PASS)
962 ctx->encode_config.rcParams.multiPass = NV_ENC_MULTI_PASS_DISABLED;
963 if (ctx->flags & NVENC_TWO_PASSES || ctx->twopass > 0)
964 ctx->encode_config.rcParams.multiPass = NV_ENC_TWO_PASS_FULL_RESOLUTION;
966 if (ctx->rc < 0) {
967 if (ctx->cbr) {
968 ctx->rc = NV_ENC_PARAMS_RC_CBR;
969 } else if (ctx->cqp >= 0) {
970 ctx->rc = NV_ENC_PARAMS_RC_CONSTQP;
971 } else if (ctx->quality >= 0.0f) {
972 ctx->rc = NV_ENC_PARAMS_RC_VBR;
976 if (ctx->rc < 0) {
977 if (ctx->flags & NVENC_ONE_PASS)
978 ctx->twopass = 0;
979 if (ctx->flags & NVENC_TWO_PASSES)
980 ctx->twopass = 1;
982 if (ctx->twopass < 0)
983 ctx->twopass = (ctx->flags & NVENC_LOWLATENCY) != 0;
985 if (ctx->cbr) {
986 if (ctx->twopass) {
987 ctx->rc = NV_ENC_PARAMS_RC_CBR_LOWDELAY_HQ;
989 ctx->rc = NV_ENC_PARAMS_RC_CBR;
991 } else if (ctx->cqp >= 0) {
992 ctx->rc = NV_ENC_PARAMS_RC_CONSTQP;
993 } else if (ctx->twopass) {
994 ctx->rc = NV_ENC_PARAMS_RC_VBR_HQ;
996 ctx->rc = NV_ENC_PARAMS_RC_VBR_MINQP;
1001 if (ctx->rc >= 0 && ctx->rc & RC_MODE_DEPRECATED) {
1005 ctx->rc &= ~RC_MODE_DEPRECATED;
1009 ctx->encode_config.rcParams.cbQPIndexOffset = ctx->qp_cb_offset;
1010 ctx->encode_config.rcParams.crQPIndexOffset = ctx->qp_cr_offset;
1012 if (ctx->qp_cb_offset || ctx->qp_cr_offset)
1017 if (ctx->ldkfs)
1018 ctx->encode_config.rcParams.lowDelayKeyFrameScale = ctx->ldkfs;
1021 if (ctx->flags & NVENC_LOSSLESS) {
1023 } else if (ctx->rc >= 0) {
1026 ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_VBR;
1031 ctx->encode_config.rcParams.vbvBufferSize = avctx->rc_buffer_size;
1032 } else if (ctx->encode_config.rcParams.averageBitRate > 0) {
1033 avctx->rc_buffer_size = ctx->encode_config.rcParams.vbvBufferSize = 2 * ctx->encode_config.rcParams.averageBitRate;
1036 if (ctx->aq) {
1037 ctx->encode_config.rcParams.enableAQ = 1;
1038 ctx->encode_config.rcParams.aqStrength = ctx->aq_strength;
1042 if (ctx->temporal_aq) {
1043 ctx->encode_config.rcParams.enableTemporalAQ = 1;
1047 if (ctx->rc_lookahead > 0) {
1048 int lkd_bound = FFMIN(ctx->nb_surfaces, ctx->async_depth) -
1049 ctx->encode_config.frameIntervalP - 4;
1055 ctx->encode_config.rcParams.enableLookahead = 1;
1056 ctx->encode_config.rcParams.lookaheadDepth = av_clip(ctx->rc_lookahead, 0, lkd_bound);
1057 ctx->encode_config.rcParams.disableIadapt = ctx->no_scenecut;
1058 ctx->encode_config.rcParams.disableBadapt = !ctx->b_adapt;
1061 ctx->encode_config.rcParams.lookaheadDepth,
1062 ctx->encode_config.rcParams.disableIadapt ? "disabled" : "enabled",
1063 ctx->encode_config.rcParams.disableBadapt ? "disabled" : "enabled");
1067 if (ctx->strict_gop) {
1068 ctx->encode_config.rcParams.strictGOPTarget = 1;
1072 if (ctx->nonref_p)
1073 ctx->encode_config.rcParams.enableNonRefP = 1;
1075 if (ctx->zerolatency)
1076 ctx->encode_config.rcParams.zeroReorderDelay = 1;
1078 if (ctx->quality) {
1080 int tmp_quality = (int)(ctx->quality * 256.0f);
1081 ctx->encode_config.rcParams.targetQuality = (uint8_t)(tmp_quality >> 8);
1082 ctx->encode_config.rcParams.targetQualityLSB = (uint8_t)(tmp_quality & 0xff);
1087 ctx->encode_config.rcParams.averageBitRate = avctx->bit_rate = 0;
1088 ctx->encode_config.rcParams.vbvBufferSize = avctx->rc_buffer_size = 0;
1089 ctx->encode_config.rcParams.maxBitRate = avctx->rc_max_rate;
1095 NvencContext *ctx = avctx->priv_data;
1096 NV_ENC_CONFIG *cc = &ctx->encode_config;
1100 vui->colourMatrix = IS_GBRP(ctx->data_pix_fmt) ? AVCOL_SPC_RGB : avctx->colorspace;
1104 || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ420P || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ422P || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ444P);
1117 if (ctx->intra_refresh) {
1122 h264->singleSliceIntraRefresh = ctx->single_slice_intra_refresh;
1126 if (ctx->constrained_encoding)
1131 h264->outputAUD = ctx->aud;
1133 if (ctx->dpb_size >= 0) {
1135 h264->maxNumRefFrames = ctx->dpb_size;
1138 if (ctx->intra_refresh) {
1157 if (ctx->flags & NVENC_LOSSLESS) {
1160 switch(ctx->profile) {
1181 if (IS_YUV444(ctx->data_pix_fmt)) {
1188 h264->level = ctx->level;
1190 if (ctx->coder >= 0)
1191 h264->entropyCodingMode = ctx->coder;
1194 h264->useBFramesAsRef = ctx->b_ref_mode;
1207 NvencContext *ctx = avctx->priv_data;
1208 NV_ENC_CONFIG *cc = &ctx->encode_config;
1212 vui->colourMatrix = IS_GBRP(ctx->data_pix_fmt) ? AVCOL_SPC_RGB : avctx->colorspace;
1216 || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ420P || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ422P || ctx->data_pix_fmt == AV_PIX_FMT_YUVJ444P);
1229 if (ctx->intra_refresh) {
1234 hevc->singleSliceIntraRefresh = ctx->single_slice_intra_refresh;
1239 if (ctx->constrained_encoding)
1245 hevc->outputAUD = ctx->aud;
1247 if (ctx->dpb_size >= 0) {
1249 hevc->maxNumRefFramesInDPB = ctx->dpb_size;
1252 if (ctx->intra_refresh) {
1264 switch (ctx->profile) {
1280 if (IS_10BIT(ctx->data_pix_fmt)) {
1286 if (IS_YUV444(ctx->data_pix_fmt)) {
1291 hevc->chromaFormatIDC = IS_YUV444(ctx->data_pix_fmt) ? 3 : 1;
1293 hevc->pixelBitDepthMinus8 = IS_10BIT(ctx->data_pix_fmt) ? 2 : 0;
1295 hevc->level = ctx->level;
1297 hevc->tier = ctx->tier;
1300 hevc->useBFramesAsRef = ctx->b_ref_mode;
1340 NvencContext *ctx = avctx->priv_data;
1341 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
1350 ctx->encode_config.version = NV_ENC_CONFIG_VER;
1351 ctx->init_encode_params.version = NV_ENC_INITIALIZE_PARAMS_VER;
1353 ctx->init_encode_params.encodeHeight = avctx->height;
1354 ctx->init_encode_params.encodeWidth = avctx->width;
1356 ctx->init_encode_params.encodeConfig = &ctx->encode_config;
1362 ctx->init_encode_params.tuningInfo = ctx->tuning_info;
1364 if (ctx->flags & NVENC_LOSSLESS)
1365 ctx->init_encode_params.tuningInfo = NV_ENC_TUNING_INFO_LOSSLESS;
1366 else if (ctx->flags & NVENC_LOWLATENCY)
1367 ctx->init_encode_params.tuningInfo = NV_ENC_TUNING_INFO_LOW_LATENCY;
1369 nv_status = p_nvenc->nvEncGetEncodePresetConfigEx(ctx->nvencoder,
1370 ctx->init_encode_params.encodeGUID,
1371 ctx->init_encode_params.presetGUID,
1372 ctx->init_encode_params.tuningInfo,
1375 nv_status = p_nvenc->nvEncGetEncodePresetConfig(ctx->nvencoder,
1376 ctx->init_encode_params.encodeGUID,
1377 ctx->init_encode_params.presetGUID,
1383 memcpy(&ctx->encode_config, &preset_config.presetCfg, sizeof(ctx->encode_config));
1385 ctx->encode_config.version = NV_ENC_CONFIG_VER;
1388 ctx->init_encode_params.darHeight = dh;
1389 ctx->init_encode_params.darWidth = dw;
1392 ctx->init_encode_params.frameRateNum = avctx->framerate.num;
1393 ctx->init_encode_params.frameRateDen = avctx->framerate.den;
1395 ctx->init_encode_params.frameRateNum = avctx->time_base.den;
1396 ctx->init_encode_params.frameRateDen = avctx->time_base.num * avctx->ticks_per_frame;
1399 ctx->init_encode_params.enableEncodeAsync = 0;
1400 ctx->init_encode_params.enablePTD = 1;
1406 if (ctx->rc_lookahead == 0 && ctx->encode_config.rcParams.enableLookahead)
1407 ctx->rc_lookahead = ctx->encode_config.rcParams.lookaheadDepth;
1410 if (ctx->weighted_pred == 1)
1411 ctx->init_encode_params.enableWeightedPrediction = 1;
1413 if (ctx->bluray_compat) {
1414 ctx->aud = 1;
1415 ctx->dpb_size = FFMIN(FFMAX(avctx->refs, 0), 6);
1422 ctx->level = NV_ENC_LEVEL_HEVC_51;
1423 ctx->tier = NV_ENC_TIER_HEVC_HIGH;
1431 ctx->encode_config.frameIntervalP = avctx->max_b_frames + 1;
1434 ctx->encode_config.gopLength = avctx->gop_size;
1436 ctx->encode_config.frameIntervalP = 0;
1437 ctx->encode_config.gopLength = 1;
1441 if(ctx->single_slice_intra_refresh)
1442 ctx->intra_refresh = 1;
1444 if (ctx->intra_refresh)
1445 ctx->encode_config.gopLength = NVENC_INFINITE_GOPLENGTH;
1452 ctx->encode_config.frameFieldMode = NV_ENC_PARAMS_FRAME_FIELD_MODE_FIELD;
1454 ctx->encode_config.frameFieldMode = NV_ENC_PARAMS_FRAME_FIELD_MODE_FRAME;
1465 nv_status = p_nvenc->nvEncInitializeEncoder(ctx->nvencoder, &ctx->init_encode_params);
1472 if (ctx->cu_context) {
1473 nv_status = p_nvenc->nvEncSetIOCudaStreams(ctx->nvencoder, &ctx->cu_stream, &ctx->cu_stream);
1485 if (ctx->encode_config.frameIntervalP > 1)
1488 if (ctx->encode_config.rcParams.averageBitRate > 0)
1489 avctx->bit_rate = ctx->encode_config.rcParams.averageBitRate;
1494 cpb_props->max_bitrate = ctx->encode_config.rcParams.maxBitRate;
1496 cpb_props->buffer_size = ctx->encode_config.rcParams.vbvBufferSize;
1534 NvencContext *ctx = avctx->priv_data;
1535 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
1537 NvencSurface* tmp_surface = &ctx->surfaces[idx];
1544 ctx->surfaces[idx].in_ref = av_frame_alloc();
1545 if (!ctx->surfaces[idx].in_ref)
1550 ctx->surfaces[idx].format = nvenc_map_buffer_format(ctx->data_pix_fmt);
1551 if (ctx->surfaces[idx].format == NV_ENC_BUFFER_FORMAT_UNDEFINED) {
1553 av_get_pix_fmt_name(ctx->data_pix_fmt));
1560 allocSurf.bufferFmt = ctx->surfaces[idx].format;
1562 nv_status = p_nvenc->nvEncCreateInputBuffer(ctx->nvencoder, &allocSurf);
1567 ctx->surfaces[idx].input_surface = allocSurf.inputBuffer;
1568 ctx->surfaces[idx].width = allocSurf.width;
1569 ctx->surfaces[idx].height = allocSurf.height;
1572 nv_status = p_nvenc->nvEncCreateBitstreamBuffer(ctx->nvencoder, &allocOut);
1576 p_nvenc->nvEncDestroyInputBuffer(ctx->nvencoder, ctx->surfaces[idx].input_surface);
1577 av_frame_free(&ctx->surfaces[idx].in_ref);
1581 ctx->surfaces[idx].output_surface = allocOut.bitstreamBuffer;
1583 av_fifo_write(ctx->unused_surface_queue, &tmp_surface, 1);
1590 NvencContext *ctx = avctx->priv_data;
1593 ctx->surfaces = av_calloc(ctx->nb_surfaces, sizeof(*ctx->surfaces));
1594 if (!ctx->surfaces)
1597 ctx->timestamp_list = av_fifo_alloc2(ctx->nb_surfaces, sizeof(int64_t), 0);
1598 if (!ctx->timestamp_list)
1601 ctx->unused_surface_queue = av_fifo_alloc2(ctx->nb_surfaces, sizeof(NvencSurface*), 0);
1602 if (!ctx->unused_surface_queue)
1605 ctx->output_surface_queue = av_fifo_alloc2(ctx->nb_surfaces, sizeof(NvencSurface*), 0);
1606 if (!ctx->output_surface_queue)
1608 ctx->output_surface_ready_queue = av_fifo_alloc2(ctx->nb_surfaces, sizeof(NvencSurface*), 0);
1609 if (!ctx->output_surface_ready_queue)
1616 for (i = 0; i < ctx->nb_surfaces; i++) {
1631 NvencContext *ctx = avctx->priv_data;
1632 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
1645 nv_status = p_nvenc->nvEncGetSequenceParams(ctx->nvencoder, &payload);
1664 NvencContext *ctx = avctx->priv_data;
1665 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
1670 if (ctx->nvencoder) {
1678 p_nvenc->nvEncEncodePicture(ctx->nvencoder, &params);
1681 av_fifo_freep2(&ctx->timestamp_list);
1682 av_fifo_freep2(&ctx->output_surface_ready_queue);
1683 av_fifo_freep2(&ctx->output_surface_queue);
1684 av_fifo_freep2(&ctx->unused_surface_queue);
1686 if (ctx->surfaces && (avctx->pix_fmt == AV_PIX_FMT_CUDA || avctx->pix_fmt == AV_PIX_FMT_D3D11)) {
1687 for (i = 0; i < ctx->nb_registered_frames; i++) {
1688 if (ctx->registered_frames[i].mapped)
1689 p_nvenc->nvEncUnmapInputResource(ctx->nvencoder, ctx->registered_frames[i].in_map.mappedResource);
1690 if (ctx->registered_frames[i].regptr)
1691 p_nvenc->nvEncUnregisterResource(ctx->nvencoder, ctx->registered_frames[i].regptr);
1693 ctx->nb_registered_frames = 0;
1696 if (ctx->surfaces) {
1697 for (i = 0; i < ctx->nb_surfaces; ++i) {
1699 p_nvenc->nvEncDestroyInputBuffer(ctx->nvencoder, ctx->surfaces[i].input_surface);
1700 av_frame_free(&ctx->surfaces[i].in_ref);
1701 p_nvenc->nvEncDestroyBitstreamBuffer(ctx->nvencoder, ctx->surfaces[i].output_surface);
1704 av_freep(&ctx->surfaces);
1705 ctx->nb_surfaces = 0;
1707 av_frame_free(&ctx->frame);
1709 av_freep(&ctx->sei_data);
1711 if (ctx->nvencoder) {
1712 p_nvenc->nvEncDestroyEncoder(ctx->nvencoder);
1718 ctx->nvencoder = NULL;
1720 if (ctx->cu_context_internal)
1721 CHECK_CU(dl_fn->cuda_dl->cuCtxDestroy(ctx->cu_context_internal));
1722 ctx->cu_context = ctx->cu_context_internal = NULL;
1725 if (ctx->d3d11_device) {
1726 ID3D11Device_Release(ctx->d3d11_device);
1727 ctx->d3d11_device = NULL;
1743 NvencContext *ctx = avctx->priv_data;
1759 ctx->data_pix_fmt = frames_ctx->sw_format;
1761 ctx->data_pix_fmt = avctx->pix_fmt;
1764 ctx->frame = av_frame_alloc();
1765 if (!ctx->frame)
1788 static NvencSurface *get_free_frame(NvencContext *ctx)
1792 if (av_fifo_read(ctx->unused_surface_queue, &tmp_surf, 1) < 0)
1831 NvencContext *ctx = avctx->priv_data;
1832 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
1838 if (ctx->nb_registered_frames == FF_ARRAY_ELEMS(ctx->registered_frames)) {
1840 for (i = 0; i < ctx->nb_registered_frames; i++) {
1841 if (!ctx->registered_frames[i].mapped) {
1842 if (ctx->registered_frames[i].regptr) {
1845 nv_status = p_nvenc->nvEncUnregisterResource(ctx->nvencoder, ctx->registered_frames[i].regptr);
1848 ctx->registered_frames[i].ptr = NULL;
1849 ctx->registered_frames[i].regptr = NULL;
1856 return ctx->nb_registered_frames++;
1865 NvencContext *ctx = avctx->priv_data;
1866 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
1873 for (i = 0; i < ctx->nb_registered_frames; i++) {
1874 if (avctx->pix_fmt == AV_PIX_FMT_CUDA && ctx->registered_frames[i].ptr == frame->data[0])
1876 else if (avctx->pix_fmt == AV_PIX_FMT_D3D11 && ctx->registered_frames[i].ptr == frame->data[0] && ctx->registered_frames[i].ptr_index == (intptr_t)frame->data[1])
1905 ret = p_nvenc->nvEncRegisterResource(ctx->nvencoder, &reg);
1911 ctx->registered_frames[idx].ptr = frame->data[0];
1912 ctx->registered_frames[idx].ptr_index = reg.subResourceIndex;
1913 ctx->registered_frames[idx].regptr = reg.registeredResource;
1920 NvencContext *ctx = avctx->priv_data;
1921 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
1938 if (!ctx->registered_frames[reg_idx].mapped) {
1939 ctx->registered_frames[reg_idx].in_map.version = NV_ENC_MAP_INPUT_RESOURCE_VER;
1940 ctx->registered_frames[reg_idx].in_map.registeredResource = ctx->registered_frames[reg_idx].regptr;
1941 nv_status = p_nvenc->nvEncMapInputResource(ctx->nvencoder, &ctx->registered_frames[reg_idx].in_map);
1948 ctx->registered_frames[reg_idx].mapped += 1;
1951 nvenc_frame->input_surface = ctx->registered_frames[reg_idx].in_map.mappedResource;
1952 nvenc_frame->format = ctx->registered_frames[reg_idx].in_map.mappedBufferFmt;
1962 nv_status = p_nvenc->nvEncLockInputBuffer(ctx->nvencoder, &lockBufferParams);
1970 nv_status = p_nvenc->nvEncUnlockInputBuffer(ctx->nvencoder, nvenc_frame->input_surface);
1984 NvencContext *ctx = avctx->priv_data;
1989 ctx->encode_config.encodeCodecConfig.h264Config.sliceMode;
1991 ctx->encode_config.encodeCodecConfig.h264Config.sliceModeData;
2000 ctx->encode_config.encodeCodecConfig.hevcConfig.sliceMode;
2002 ctx->encode_config.encodeCodecConfig.hevcConfig.sliceModeData;
2030 NvencContext *ctx = avctx->priv_data;
2033 pkt->dts = timestamp_queue_dequeue(ctx->timestamp_list);
2035 pkt->dts -= FFMAX(ctx->encode_config.frameIntervalP - 1, 0) * FFMAX(avctx->ticks_per_frame, 1) * FFMAX(avctx->time_base.num, 1);
2042 NvencContext *ctx = avctx->priv_data;
2043 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
2056 slice_mode_data = ctx->encode_config.encodeCodecConfig.h264Config.sliceModeData;
2059 slice_mode_data = ctx->encode_config.encodeCodecConfig.hevcConfig.sliceModeData;
2079 nv_status = p_nvenc->nvEncLockBitstream(ctx->nvencoder, &lock_params);
2088 p_nvenc->nvEncUnlockBitstream(ctx->nvencoder, tmpoutsurf->output_surface);
2094 nv_status = p_nvenc->nvEncUnlockBitstream(ctx->nvencoder, tmpoutsurf->output_surface);
2102 ctx->registered_frames[tmpoutsurf->reg_idx].mapped -= 1;
2103 if (ctx->registered_frames[tmpoutsurf->reg_idx].mapped == 0) {
2104 nv_status = p_nvenc->nvEncUnmapInputResource(ctx->nvencoder, ctx->registered_frames[tmpoutsurf->reg_idx].in_map.mappedResource);
2109 } else if (ctx->registered_frames[tmpoutsurf->reg_idx].mapped < 0) {
2153 timestamp_queue_dequeue(ctx->timestamp_list);
2163 NvencContext *ctx = avctx->priv_data;
2166 nb_ready = av_fifo_can_read(ctx->output_surface_ready_queue);
2167 nb_pending = av_fifo_can_read(ctx->output_surface_queue);
2170 return (nb_ready > 0) && (nb_ready + nb_pending >= ctx->async_depth);
2175 NvencContext *ctx = avctx->priv_data;
2179 if (ctx->a53_cc && av_frame_get_side_data(frame, AV_FRAME_DATA_A53_CC)) {
2184 av_log(ctx, AV_LOG_ERROR, "Not enough memory for closed captions, skipping\n");
2188 void *tmp = av_fast_realloc(ctx->sei_data,
2189 &ctx->sei_data_size,
2190 (sei_count + 1) * sizeof(*ctx->sei_data));
2196 ctx->sei_data = tmp;
2197 ctx->sei_data[sei_count].payloadSize = (uint32_t)a53_size;
2198 ctx->sei_data[sei_count].payloadType = 4;
2199 ctx->sei_data[sei_count].payload = (uint8_t*)a53_data;
2205 if (ctx->s12m_tc && av_frame_get_side_data(frame, AV_FRAME_DATA_S12M_TIMECODE)) {
2210 av_log(ctx, AV_LOG_ERROR, "Not enough memory for timecode sei, skipping\n");
2214 void *tmp = av_fast_realloc(ctx->sei_data,
2215 &ctx->sei_data_size,
2216 (sei_count + 1) * sizeof(*ctx->sei_data));
2222 ctx->sei_data = tmp;
2223 ctx->sei_data[sei_count].payloadSize = (uint32_t)tc_size;
2224 ctx->sei_data[sei_count].payloadType = SEI_TYPE_TIME_CODE;
2225 ctx->sei_data[sei_count].payload = (uint8_t*)tc_data;
2231 if (!ctx->udu_sei)
2241 tmp = av_fast_realloc(ctx->sei_data,
2242 &ctx->sei_data_size,
2243 (sei_count + 1) * sizeof(*ctx->sei_data));
2248 ctx->sei_data = tmp;
2249 ctx->sei_data[sei_count].payloadSize = side_data->size;
2250 ctx->sei_data[sei_count].payloadType = SEI_TYPE_USER_DATA_UNREGISTERED;
2251 ctx->sei_data[sei_count].payload = av_memdup(side_data->data, side_data->size);
2253 if (!ctx->sei_data[sei_count].payload) {
2266 av_freep(&(ctx->sei_data[i].payload));
2273 NvencContext *ctx = avctx->priv_data;
2274 NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &ctx->nvenc_dload_funcs.nvenc_funcs;
2284 params.reInitEncodeParams = ctx->init_encode_params;
2287 if (dw != ctx->init_encode_params.darWidth || dh != ctx->init_encode_params.darHeight) {
2290 ctx->init_encode_params.darWidth,
2291 ctx->init_encode_params.darHeight, dw, dh);
2300 if (ctx->rc != NV_ENC_PARAMS_RC_CONSTQP && ctx->support_dyn_bitrate) {
2311 if (avctx->rc_max_rate > 0 && ctx->encode_config.rcParams.maxBitRate != avctx->rc_max_rate) {
2321 if (avctx->rc_buffer_size > 0 && ctx->encode_config.rcParams.vbvBufferSize != avctx->rc_buffer_size) {
2344 ret = p_nvenc->nvEncReconfigureEncoder(ctx->nvencoder, &params);
2349 ctx->init_encode_params.darHeight = dh;
2350 ctx->init_encode_params.darWidth = dw;
2354 ctx->encode_config.rcParams.averageBitRate = params.reInitEncodeParams.encodeConfig->rcParams.averageBitRate;
2355 ctx->encode_config.rcParams.maxBitRate = params.reInitEncodeParams.encodeConfig->rcParams.maxBitRate;
2356 ctx->encode_config.rcParams.vbvBufferSize = params.reInitEncodeParams.encodeConfig->rcParams.vbvBufferSize;
2371 NvencContext *ctx = avctx->priv_data;
2372 NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
2378 if ((!ctx->cu_context && !ctx->d3d11_device) || !ctx->nvencoder)
2382 in_surf = get_free_frame(ctx);
2417 if (ctx->forced_idr >= 0 && frame->pict_type == AV_PICTURE_TYPE_I) {
2419 ctx->forced_idr ? NV_ENC_PIC_FLAG_FORCEIDR : NV_ENC_PIC_FLAG_FORCEINTRA;
2426 if (ctx->extra_sei) {
2433 nvenc_codec_specific_pic_params(avctx, &pic_params, ctx->sei_data, sei_count);
2442 nv_status = p_nvenc->nvEncEncodePicture(ctx->nvencoder, &pic_params);
2445 av_freep(&(ctx->sei_data[i].payload));
2456 av_fifo_write(ctx->output_surface_queue, &in_surf, 1);
2457 timestamp_queue_enqueue(ctx->timestamp_list, frame->pts);
2462 while (av_fifo_read(ctx->output_surface_queue, &tmp_out_surf, 1) >= 0)
2463 av_fifo_write(ctx->output_surface_ready_queue, &tmp_out_surf, 1);
2474 NvencContext *ctx = avctx->priv_data;
2476 AVFrame *frame = ctx->frame;
2478 if ((!ctx->cu_context && !ctx->d3d11_device) || !ctx->nvencoder)
2495 av_fifo_read(ctx->output_surface_ready_queue, &tmp_out_surf, 1);
2510 av_fifo_write(ctx->unused_surface_queue, &tmp_out_surf, 1);
2522 NvencContext *ctx = avctx->priv_data;
2525 av_fifo_reset2(ctx->timestamp_list);
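
The function names in the matches above trace the usual nvEncodeAPI session lifecycle: open a session against a CUDA or D3D11 device (nvEncOpenEncodeSessionEx), query codec GUIDs and capabilities, initialize the encoder, create input and bitstream buffers, encode pictures and lock bitstreams, then tear everything down. As a purely illustrative sketch, not code from nvenc.c, the session-open step could look like the following; it assumes nvEncodeAPI.h from the NVIDIA Video Codec SDK, a function list already filled in by NvEncodeAPICreateInstance(), and an existing CUDA context passed in as a void pointer. The helper name open_nvenc_session() and the parameters fn/cu_ctx are invented for this example.

    /* Illustrative sketch only, not FFmpeg code: open an NVENC session the way
     * the matches around lines 349-365 do. */
    #include <nvEncodeAPI.h>

    static void *open_nvenc_session(NV_ENCODE_API_FUNCTION_LIST *fn, void *cu_ctx)
    {
        NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS params = { 0 };
        void *encoder = NULL;

        params.version    = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER;
        params.apiVersion = NVENCAPI_VERSION;
        params.deviceType = NV_ENC_DEVICE_TYPE_CUDA;   /* NV_ENC_DEVICE_TYPE_DIRECTX for a D3D11 device */
        params.device     = cu_ctx;                    /* CUcontext (or ID3D11Device*) as a void pointer */

        /* On failure the handle must not be used; nvenc.c likewise clears
         * ctx->nvencoder when nvEncOpenEncodeSessionEx() does not succeed. */
        if (fn->nvEncOpenEncodeSessionEx(&params, &encoder) != NV_ENC_SUCCESS)
            return NULL;

        return encoder;
    }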