/*
 * Copyright © Microsoft Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "d3d12_screen.h"
#include "d3d12_video_screen.h"
#include "d3d12_format.h"
#include "util/u_video.h"
#include <directx/d3d12video.h>

#include <wrl/client.h>
using Microsoft::WRL::ComPtr;

#include "d3d12_video_types.h"

static bool
d3d12_video_buffer_is_format_supported(struct pipe_screen *screen,
                                       enum pipe_format format,
                                       enum pipe_video_profile profile,
                                       enum pipe_video_entrypoint entrypoint)
{
   return (format == PIPE_FORMAT_NV12);
}

struct d3d12_video_resolution_to_level_mapping_entry
{
   D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_DESC resolution;
   uint32_t level;
};

static d3d12_video_resolution_to_level_mapping_entry
get_max_level_resolution_video_decode_support(D3D12_VIDEO_DECODE_CONFIGURATION decoderConfig,
                                              DXGI_FORMAT format,
                                              struct pipe_screen *pscreen,
                                              bool &outSupportAny,
                                              D3D12_FEATURE_DATA_VIDEO_DECODE_SUPPORT &outSupportedConfig)
{
   d3d12_video_resolution_to_level_mapping_entry supportedResult = {};
   outSupportAny = false;
   outSupportedConfig = {};

   ComPtr<ID3D12VideoDevice> spD3D12VideoDevice;
   struct d3d12_screen *pD3D12Screen = (struct d3d12_screen *) pscreen;
   if (FAILED(pD3D12Screen->dev->QueryInterface(IID_PPV_ARGS(spD3D12VideoDevice.GetAddressOf())))) {
      // No video support in underlying d3d12 device (decode needs ID3D12VideoDevice)
      return supportedResult;
   }

   d3d12_video_resolution_to_level_mapping_entry resolutionsLevelList[] = {
      { { 8192, 4320 }, 61 },   // 8k
      { { 7680, 4800 }, 61 },   // 8k - alternative
      { { 7680, 4320 }, 61 },   // 8k - alternative
      { { 4096, 2304 }, 52 },   // 2160p (4K)
      { { 4096, 2160 }, 52 },   // 2160p (4K) - alternative
      { { 2560, 1440 }, 51 },   // 1440p
      { { 1920, 1200 }, 5 },    // 1200p
      { { 1920, 1080 }, 42 },   // 1080p
      { { 1280, 720 }, 4 },     // 720p
      { { 800, 600 }, 31 },
   };

   D3D12_FEATURE_DATA_VIDEO_DECODE_SUPPORT decodeSupport = {};
   decodeSupport.Configuration = decoderConfig;
   decodeSupport.DecodeFormat = format;

   uint32_t idxResol = 0;
   while ((idxResol < ARRAY_SIZE(resolutionsLevelList)) && !outSupportAny) {

      decodeSupport.Width = resolutionsLevelList[idxResol].resolution.Width;
      decodeSupport.Height = resolutionsLevelList[idxResol].resolution.Height;
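      // Probe decode support for this resolution; the list above is ordered from largest
      // to smallest, so the first resolution the driver accepts is the maximum reported.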
      if (SUCCEEDED(spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_DECODE_SUPPORT,
                                                            &decodeSupport,
                                                            sizeof(decodeSupport)))) {

         if (((decodeSupport.SupportFlags & D3D12_VIDEO_DECODE_SUPPORT_FLAG_SUPPORTED) != 0) ||
             decodeSupport.DecodeTier > D3D12_VIDEO_DECODE_TIER_NOT_SUPPORTED) {

            outSupportAny = true;
            outSupportedConfig = decodeSupport;
            supportedResult = resolutionsLevelList[idxResol];
         }
      }

      idxResol++;
   }

   return supportedResult;
}

static bool
d3d12_has_video_decode_support(struct pipe_screen *pscreen, enum pipe_video_profile profile)
{
   ComPtr<ID3D12VideoDevice> spD3D12VideoDevice;
   struct d3d12_screen *pD3D12Screen = (struct d3d12_screen *) pscreen;
   if (FAILED(pD3D12Screen->dev->QueryInterface(IID_PPV_ARGS(spD3D12VideoDevice.GetAddressOf())))) {
      // No video support in underlying d3d12 device (needs ID3D12VideoDevice)
      return false;
   }

   D3D12_FEATURE_DATA_VIDEO_FEATURE_AREA_SUPPORT VideoFeatureAreaSupport = {};
   if (FAILED(spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_FEATURE_AREA_SUPPORT,
                                                      &VideoFeatureAreaSupport,
                                                      sizeof(VideoFeatureAreaSupport)))) {
      return false;
   }

   // Supported profiles below
   bool supportsProfile = false;
   switch (profile) {
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_BASELINE:
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_EXTENDED:
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_CONSTRAINED_BASELINE:
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_MAIN:
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH:
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH10:
      {
         supportsProfile = true;
      } break;
      default:
         supportsProfile = false;
   }

   return VideoFeatureAreaSupport.VideoDecodeSupport && supportsProfile;
}

static bool
d3d12_video_encode_max_supported_level_for_profile(const D3D12_VIDEO_ENCODER_CODEC &argCodec,
                                                   const D3D12_VIDEO_ENCODER_PROFILE_DESC &argTargetProfile,
                                                   D3D12_VIDEO_ENCODER_LEVEL_SETTING &minLvl,
                                                   D3D12_VIDEO_ENCODER_LEVEL_SETTING &maxLvl,
                                                   ID3D12VideoDevice3 *pD3D12VideoDevice)
{
   D3D12_FEATURE_DATA_VIDEO_ENCODER_PROFILE_LEVEL capLevelData = {};
   capLevelData.NodeIndex = 0;
   capLevelData.Codec = argCodec;
   capLevelData.Profile = argTargetProfile;
   capLevelData.MinSupportedLevel = minLvl;
   capLevelData.MaxSupportedLevel = maxLvl;

   if (FAILED(pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_PROFILE_LEVEL,
                                                     &capLevelData,
                                                     sizeof(capLevelData)))) {
      return false;
   }

   return capLevelData.IsSupported;
}

static bool
d3d12_video_encode_max_supported_resolution(const D3D12_VIDEO_ENCODER_CODEC &argTargetCodec,
                                            D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_DESC &maxResolution,
                                            ID3D12VideoDevice3 *pD3D12VideoDevice)
{
   D3D12_FEATURE_DATA_VIDEO_ENCODER_OUTPUT_RESOLUTION_RATIOS_COUNT capResRatiosCountData = { 0, argTargetCodec, 0 };

   if (FAILED(pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_OUTPUT_RESOLUTION_RATIOS_COUNT,
                                                     &capResRatiosCountData,
                                                     sizeof(capResRatiosCountData)))) {
      return false;
   }

   D3D12_FEATURE_DATA_VIDEO_ENCODER_OUTPUT_RESOLUTION capOutputResolutionData = {};
   capOutputResolutionData.NodeIndex = 0;
   capOutputResolutionData.Codec = argTargetCodec;
   capOutputResolutionData.ResolutionRatiosCount = capResRatiosCountData.ResolutionRatiosCount;

   std::vector<D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_RATIO_DESC> ratiosTmpOutput;
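   // When the driver reports resolution ratio restrictions, provide storage for them so
   // the output resolution query below can fill them in; otherwise pass no ratio list.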
   if (capResRatiosCountData.ResolutionRatiosCount > 0) {
      ratiosTmpOutput.resize(capResRatiosCountData.ResolutionRatiosCount);
      capOutputResolutionData.pResolutionRatios = ratiosTmpOutput.data();
   } else {
      capOutputResolutionData.pResolutionRatios = nullptr;
   }

   if (FAILED(pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_OUTPUT_RESOLUTION,
                                                     &capOutputResolutionData,
                                                     sizeof(capOutputResolutionData))) ||
       !capOutputResolutionData.IsSupported) {
      return false;
   }

   maxResolution = capOutputResolutionData.MaxResolutionSupported;

   return true;
}

static uint32_t
d3d12_video_encode_supported_references_per_frame_structures(const D3D12_VIDEO_ENCODER_CODEC &codec,
                                                             D3D12_VIDEO_ENCODER_PROFILE_H264 profile,
                                                             D3D12_VIDEO_ENCODER_LEVELS_H264 level,
                                                             ID3D12VideoDevice3 *pD3D12VideoDevice)
{
   uint32_t supportedMaxRefFrames = 0u;

   D3D12_VIDEO_ENCODER_CODEC_PICTURE_CONTROL_SUPPORT_H264 h264PictureControl = {};
   D3D12_FEATURE_DATA_VIDEO_ENCODER_CODEC_PICTURE_CONTROL_SUPPORT capPictureControlData = {};
   capPictureControlData.NodeIndex = 0;
   capPictureControlData.Codec = codec;
   capPictureControlData.Profile.pH264Profile = &profile;
   capPictureControlData.Profile.DataSize = sizeof(profile);
   capPictureControlData.PictureSupport.pH264Support = &h264PictureControl;
   capPictureControlData.PictureSupport.DataSize = sizeof(h264PictureControl);
   HRESULT hr = pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_CODEC_PICTURE_CONTROL_SUPPORT,
                                                       &capPictureControlData,
                                                       sizeof(capPictureControlData));
   if (FAILED(hr)) {
      debug_printf("CheckFeatureSupport failed with HR %x\n", hr);
   }

   if (capPictureControlData.IsSupported) {
      /* This attribute determines the maximum number of reference
       * frames supported for encoding.
       *
       * Note: for H.264 encoding, the value represents the maximum number
       * of reference frames for both the reference picture list 0 (bottom
       * 16 bits) and the reference picture list 1 (top 16 bits).
       */
      uint32_t maxRefForL0 = std::min(capPictureControlData.PictureSupport.pH264Support->MaxL0ReferencesForP,
                                      capPictureControlData.PictureSupport.pH264Support->MaxL0ReferencesForB);
      uint32_t maxRefForL1 = capPictureControlData.PictureSupport.pH264Support->MaxL1ReferencesForB;
      supportedMaxRefFrames = (maxRefForL0 & 0xffff) | ((maxRefForL1 & 0xffff) << 16);
   }

   return supportedMaxRefFrames;
}

static uint32_t
d3d12_video_encode_supported_slice_structures(const D3D12_VIDEO_ENCODER_CODEC &codec,
                                              D3D12_VIDEO_ENCODER_PROFILE_H264 profile,
                                              D3D12_VIDEO_ENCODER_LEVELS_H264 level,
                                              ID3D12VideoDevice3 *pD3D12VideoDevice)
{
   uint32_t supportedSliceStructuresBitMask = PIPE_VIDEO_CAP_SLICE_STRUCTURE_NONE;

   D3D12_FEATURE_DATA_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE capDataSubregionLayout = {};
   capDataSubregionLayout.NodeIndex = 0;
   capDataSubregionLayout.Codec = codec;
   capDataSubregionLayout.Profile.pH264Profile = &profile;
   capDataSubregionLayout.Profile.DataSize = sizeof(profile);
   capDataSubregionLayout.Level.pH264LevelSetting = &level;
   capDataSubregionLayout.Level.DataSize = sizeof(level);

   /**
    * pipe_video_cap_slice_structure
    *
    * This attribute determines slice structures supported by the
    * driver for encoding. This attribute is a hint to the user so
    * that he can choose a suitable surface size and how to arrange
    * the encoding process of multiple slices per frame.
    *
    * More specifically, for H.264 encoding, this attribute
    * determines the range of accepted values to
    * h264_slice_descriptor::macroblock_address and
    * h264_slice_descriptor::num_macroblocks.
    */
   capDataSubregionLayout.SubregionMode =
      D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_UNIFORM_PARTITIONING_SUBREGIONS_PER_FRAME;
   HRESULT hr = pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE,
                                                       &capDataSubregionLayout,
                                                       sizeof(capDataSubregionLayout));
   if (FAILED(hr)) {
      debug_printf("CheckFeatureSupport failed with HR %x\n", hr);
   } else if (capDataSubregionLayout.IsSupported) {
      /* This would be setting N subregions per frame in this D3D12 mode where N = (height/blocksize) / K */
      supportedSliceStructuresBitMask |= PIPE_VIDEO_CAP_SLICE_STRUCTURE_EQUAL_MULTI_ROWS;
      /* Assuming height/blocksize >= max_supported_slices, which is reported
         in PIPE_VIDEO_CAP_ENC_MAX_SLICES_PER_FRAME and should be checked by the client */
      /* This would be setting N subregions per frame in this D3D12 mode where N = (height/blocksize) */
      supportedSliceStructuresBitMask |= PIPE_VIDEO_CAP_SLICE_STRUCTURE_EQUAL_ROWS;
      /* This is ok, would be setting K rows per subregions in this D3D12 mode (and rounding the last one) */
      supportedSliceStructuresBitMask |= PIPE_VIDEO_CAP_SLICE_STRUCTURE_POWER_OF_TWO_ROWS;
   }

   capDataSubregionLayout.SubregionMode =
      D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_UNIFORM_PARTITIONING_ROWS_PER_SUBREGION;
   hr = pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE,
                                               &capDataSubregionLayout,
                                               sizeof(capDataSubregionLayout));
   if (FAILED(hr)) {
      debug_printf("CheckFeatureSupport failed with HR %x\n", hr);
   } else if (capDataSubregionLayout.IsSupported) {
      /* This would be setting K rows per subregions in this D3D12 mode */
      supportedSliceStructuresBitMask |= PIPE_VIDEO_CAP_SLICE_STRUCTURE_EQUAL_MULTI_ROWS;
      /* Assuming height/blocksize >= max_supported_slices, which is reported
         in PIPE_VIDEO_CAP_ENC_MAX_SLICES_PER_FRAME and should be checked by the client */
      /* This would be setting 1 row per subregion in this D3D12 mode */
      supportedSliceStructuresBitMask |= PIPE_VIDEO_CAP_SLICE_STRUCTURE_EQUAL_ROWS;
      /* This is ok, would be setting K rows per subregions in this D3D12 mode (and rounding the last one) */
      supportedSliceStructuresBitMask |= PIPE_VIDEO_CAP_SLICE_STRUCTURE_POWER_OF_TWO_ROWS;
   }

   /* Needs more work in VA frontend to support VAEncMiscParameterMaxSliceSize
      and the driver potentially reporting back status in VACodedBufferSegment */

   /*capDataSubregionLayout.SubregionMode = D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_BYTES_PER_SUBREGION;
   hr = pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE,
                                               &capDataSubregionLayout,
                                               sizeof(capDataSubregionLayout));
   if (FAILED(hr)) {
      debug_printf("CheckFeatureSupport failed with HR %x\n", hr);
   } else if (capDataSubregionLayout.IsSupported) {
      supportedSliceStructuresBitMask |= PIPE_VIDEO_CAP_SLICE_STRUCTURE_MAX_SLICE_SIZE;
   }*/

   return supportedSliceStructuresBitMask;
}

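/* Queries D3D12_FEATURE_VIDEO_ENCODER_SUPPORT with a minimal CQP configuration at the
 * given resolution and reports back the resolution-dependent maximum number of
 * subregions (slices) per frame. */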
static bool
d3d12_video_encode_max_supported_slices(const D3D12_VIDEO_ENCODER_CODEC &argTargetCodec,
                                        D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_DESC maxResolution,
                                        DXGI_FORMAT encodeFormat,
                                        uint32_t &outMaxSlices,
                                        ID3D12VideoDevice3 *pD3D12VideoDevice)
{
   D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT capEncoderSupportData = {};
   capEncoderSupportData.NodeIndex = 0;
   capEncoderSupportData.Codec = argTargetCodec;
   capEncoderSupportData.InputFormat = encodeFormat;
   capEncoderSupportData.RateControl = {};
   capEncoderSupportData.RateControl.Mode = D3D12_VIDEO_ENCODER_RATE_CONTROL_MODE_CQP;
   capEncoderSupportData.RateControl.TargetFrameRate.Numerator = 60;
   capEncoderSupportData.RateControl.TargetFrameRate.Denominator = 1;
   D3D12_VIDEO_ENCODER_RATE_CONTROL_CQP rcCqp = { 25, 25, 25 };
   capEncoderSupportData.RateControl.ConfigParams.pConfiguration_CQP = &rcCqp;
   capEncoderSupportData.RateControl.ConfigParams.DataSize = sizeof(rcCqp);
   capEncoderSupportData.IntraRefresh = D3D12_VIDEO_ENCODER_INTRA_REFRESH_MODE_NONE;
   capEncoderSupportData.ResolutionsListCount = 1;
   capEncoderSupportData.pResolutionList = &maxResolution;
   capEncoderSupportData.MaxReferenceFramesInDPB = 1;
   capEncoderSupportData.SubregionFrameEncoding =
      D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_UNIFORM_PARTITIONING_SUBREGIONS_PER_FRAME;

   D3D12_VIDEO_ENCODER_PROFILE_H264 h264prof = {};
   D3D12_VIDEO_ENCODER_LEVELS_H264 h264lvl = {};
   D3D12_VIDEO_ENCODER_SEQUENCE_GOP_STRUCTURE_H264 h264Gop = { 1, 0, 0, 0, 0 };
   D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_H264 h264Config = {};
   switch (argTargetCodec) {
      case D3D12_VIDEO_ENCODER_CODEC_H264:
      {
         capEncoderSupportData.SuggestedProfile.pH264Profile = &h264prof;
         capEncoderSupportData.SuggestedProfile.DataSize = sizeof(h264prof);
         capEncoderSupportData.SuggestedLevel.pH264LevelSetting = &h264lvl;
         capEncoderSupportData.SuggestedLevel.DataSize = sizeof(h264lvl);
         capEncoderSupportData.CodecGopSequence.pH264GroupOfPictures = &h264Gop;
         capEncoderSupportData.CodecGopSequence.DataSize = sizeof(h264Gop);
         capEncoderSupportData.CodecConfiguration.DataSize = sizeof(h264Config);
         capEncoderSupportData.CodecConfiguration.pH264Config = &h264Config;
      } break;

      default:
      {
         unreachable("Unsupported D3D12_VIDEO_ENCODER_CODEC");
      } break;
   }

   // prepare inout storage for the resolution dependent result.
   D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLUTION_SUPPORT_LIMITS resolutionDepCaps = {};
   capEncoderSupportData.pResolutionDependentSupport = &resolutionDepCaps;

   HRESULT hr = pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT,
                                                       &capEncoderSupportData,
                                                       sizeof(capEncoderSupportData));
   if (FAILED(hr)) {
      debug_printf("CheckFeatureSupport failed with HR %x\n", hr);
      return false;
   } else {
      bool configSupported =
         (((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_GENERAL_SUPPORT_OK) != 0) &&
          (capEncoderSupportData.ValidationFlags == D3D12_VIDEO_ENCODER_VALIDATION_FLAG_NONE));

      outMaxSlices = resolutionDepCaps.MaxSubregionsNumber;
      return configSupported;
   }
}

static bool
d3d12_has_video_encode_support(struct pipe_screen *pscreen,
                               enum pipe_video_profile profile,
                               uint32_t &maxLvlSpec,
                               D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_DESC &maxRes,
                               uint32_t &maxSlices,
                               uint32_t &supportedSliceStructures,
                               uint32_t &maxReferencesPerFrame)
{
   ComPtr<ID3D12VideoDevice3> spD3D12VideoDevice;
   struct d3d12_screen *pD3D12Screen = (struct d3d12_screen *) pscreen;
   if (FAILED(pD3D12Screen->dev->QueryInterface(IID_PPV_ARGS(spD3D12VideoDevice.GetAddressOf())))) {
      // No video encode support in underlying d3d12 device (needs ID3D12VideoDevice3)
      return false;
   }

   D3D12_FEATURE_DATA_VIDEO_FEATURE_AREA_SUPPORT VideoFeatureAreaSupport = {};
   if (FAILED(spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_FEATURE_AREA_SUPPORT,
                                                      &VideoFeatureAreaSupport,
                                                      sizeof(VideoFeatureAreaSupport)))) {
      return false;
   }

   bool supportsProfile = false;
   switch (profile) {
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_CONSTRAINED_BASELINE:
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_BASELINE:
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_MAIN:
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH:
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH10:
      {
         supportsProfile = true;
         D3D12_VIDEO_ENCODER_PROFILE_DESC profDesc = {};
         D3D12_VIDEO_ENCODER_PROFILE_H264 profH264 =
            d3d12_video_encoder_convert_profile_to_d3d12_enc_profile_h264(profile);
         profDesc.DataSize = sizeof(profH264);
         profDesc.pH264Profile = &profH264;
         D3D12_VIDEO_ENCODER_CODEC codecDesc = d3d12_video_encoder_convert_codec_to_d3d12_enc_codec(profile);
         D3D12_VIDEO_ENCODER_LEVELS_H264 minLvlSettingH264 = static_cast<D3D12_VIDEO_ENCODER_LEVELS_H264>(0);
         D3D12_VIDEO_ENCODER_LEVELS_H264 maxLvlSettingH264 = static_cast<D3D12_VIDEO_ENCODER_LEVELS_H264>(0);
         D3D12_VIDEO_ENCODER_LEVEL_SETTING minLvl = {};
         D3D12_VIDEO_ENCODER_LEVEL_SETTING maxLvl = {};
         minLvl.pH264LevelSetting = &minLvlSettingH264;
         minLvl.DataSize = sizeof(minLvlSettingH264);
         maxLvl.pH264LevelSetting = &maxLvlSettingH264;
         maxLvl.DataSize = sizeof(maxLvlSettingH264);
         if (d3d12_video_encode_max_supported_level_for_profile(codecDesc,
                                                                profDesc,
                                                                minLvl,
                                                                maxLvl,
                                                                spD3D12VideoDevice.Get())) {
            uint32_t constraintset3flag = false;
            d3d12_video_encoder_convert_from_d3d12_level_h264(maxLvlSettingH264, maxLvlSpec, constraintset3flag);
            supportsProfile = true;
         }

         if (supportsProfile) {
            DXGI_FORMAT encodeFormat = d3d12_convert_pipe_video_profile_to_dxgi_format(profile);
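            // Gather the remaining encode caps for this codec/profile/level: max resolution,
            // max slices per frame, supported slice structures and references per frame.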
            supportsProfile = supportsProfile &&
                              d3d12_video_encode_max_supported_resolution(codecDesc, maxRes, spD3D12VideoDevice.Get());
            supportsProfile = supportsProfile &&
                              d3d12_video_encode_max_supported_slices(codecDesc,
                                                                      maxRes,
                                                                      encodeFormat,
                                                                      maxSlices,
                                                                      spD3D12VideoDevice.Get());
            supportedSliceStructures = d3d12_video_encode_supported_slice_structures(codecDesc,
                                                                                     profH264,
                                                                                     maxLvlSettingH264,
                                                                                     spD3D12VideoDevice.Get());
            maxReferencesPerFrame =
               d3d12_video_encode_supported_references_per_frame_structures(codecDesc,
                                                                            profH264,
                                                                            maxLvlSettingH264,
                                                                            spD3D12VideoDevice.Get());
         }
      } break;
      default:
         supportsProfile = false;
   }

   return VideoFeatureAreaSupport.VideoEncodeSupport && supportsProfile;
}

static int
d3d12_screen_get_video_param_decode(struct pipe_screen *pscreen,
                                    enum pipe_video_profile profile,
                                    enum pipe_video_entrypoint entrypoint,
                                    enum pipe_video_cap param)
{
   switch (param) {
      case PIPE_VIDEO_CAP_NPOT_TEXTURES:
         return 1;
      case PIPE_VIDEO_CAP_MAX_WIDTH:
      case PIPE_VIDEO_CAP_MAX_HEIGHT:
      case PIPE_VIDEO_CAP_MAX_LEVEL:
      case PIPE_VIDEO_CAP_SUPPORTED:
      {
         if (d3d12_has_video_decode_support(pscreen, profile)) {
            DXGI_FORMAT format = d3d12_convert_pipe_video_profile_to_dxgi_format(profile);
            auto pipeFmt = d3d12_get_pipe_format(format);
            bool formatSupported = pscreen->is_video_format_supported(pscreen, pipeFmt, profile, entrypoint);
            if (formatSupported) {
               GUID decodeGUID = d3d12_video_decoder_convert_pipe_video_profile_to_d3d12_profile(profile);
               GUID emptyGUID = {};
               if (decodeGUID != emptyGUID) {
                  bool supportAny = false;
                  D3D12_FEATURE_DATA_VIDEO_DECODE_SUPPORT outSupportedConfig = {};
                  D3D12_VIDEO_DECODE_CONFIGURATION decoderConfig = { decodeGUID,
                                                                     D3D12_BITSTREAM_ENCRYPTION_TYPE_NONE,
                                                                     D3D12_VIDEO_FRAME_CODED_INTERLACE_TYPE_NONE };

                  d3d12_video_resolution_to_level_mapping_entry bestSupportedConfig =
                     get_max_level_resolution_video_decode_support(decoderConfig,
                                                                   format,
                                                                   pscreen,
                                                                   supportAny,
                                                                   outSupportedConfig);
                  if (supportAny) {
                     if (param == PIPE_VIDEO_CAP_MAX_WIDTH) {
                        return bestSupportedConfig.resolution.Width;
                     } else if (param == PIPE_VIDEO_CAP_MAX_HEIGHT) {
                        return bestSupportedConfig.resolution.Height;
                     } else if (param == PIPE_VIDEO_CAP_MAX_LEVEL) {
                        return bestSupportedConfig.level;
                     } else if (param == PIPE_VIDEO_CAP_SUPPORTED) {
                        return 1;
                     }
                  }
               }
            }
         }
         return 0;
      } break;
      case PIPE_VIDEO_CAP_PREFERED_FORMAT:
         return PIPE_FORMAT_NV12;
      case PIPE_VIDEO_CAP_PREFERS_INTERLACED:
         return false;
      case PIPE_VIDEO_CAP_SUPPORTS_INTERLACED:
         return true;
      case PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE:
         return true;
      case PIPE_VIDEO_SUPPORTS_CONTIGUOUS_PLANES_MAP:
         return true;
      default:
         debug_printf("[d3d12_screen_get_video_param] unknown video param: %d\n", param);
         return 0;
   }
}

static bool
d3d12_has_video_process_support(struct pipe_screen *pscreen, D3D12_FEATURE_DATA_VIDEO_PROCESS_SUPPORT &supportCaps)
{
   ComPtr<ID3D12VideoDevice2> spD3D12VideoDevice;
   struct d3d12_screen *pD3D12Screen = (struct d3d12_screen *) pscreen;
   if (FAILED(pD3D12Screen->dev->QueryInterface(IID_PPV_ARGS(spD3D12VideoDevice.GetAddressOf())))) {
      // No video processing support in underlying d3d12 device (needs ID3D12VideoDevice2)
      return false;
   }

   D3D12_FEATURE_DATA_VIDEO_FEATURE_AREA_SUPPORT VideoFeatureAreaSupport = {};
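   // Confirm the device exposes the video processing feature area before probing
   // specific input/output configurations below.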
   if (FAILED(spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_FEATURE_AREA_SUPPORT,
                                                      &VideoFeatureAreaSupport,
                                                      sizeof(VideoFeatureAreaSupport)))) {
      return false;
   }

   struct ResolStruct {
      uint Width;
      uint Height;
   };

   ResolStruct resolutionsList[] = {
      { 8192, 8192 },   // 8k
      { 8192, 4320 },   // 8k - alternative
      { 7680, 4800 },   // 8k - alternative
      { 7680, 4320 },   // 8k - alternative
      { 4096, 2304 },   // 2160p (4K)
      { 4096, 2160 },   // 2160p (4K) - alternative
      { 2560, 1440 },   // 1440p
      { 1920, 1200 },   // 1200p
      { 1920, 1080 },   // 1080p
      { 1280, 720 },    // 720p
      { 800, 600 },
   };

   uint32_t idxResol = 0;
   bool bSupportsAny = false;
   while ((idxResol < ARRAY_SIZE(resolutionsList)) && !bSupportsAny) {
      supportCaps.InputSample.Width = resolutionsList[idxResol].Width;
      supportCaps.InputSample.Height = resolutionsList[idxResol].Height;
      if (SUCCEEDED(spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_PROCESS_SUPPORT,
                                                            &supportCaps,
                                                            sizeof(supportCaps)))) {
         bSupportsAny = ((supportCaps.SupportFlags & D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED) != 0);
      }
      idxResol++;
   }

   return VideoFeatureAreaSupport.VideoProcessSupport && bSupportsAny;
}

static int
d3d12_screen_get_video_param_postproc(struct pipe_screen *pscreen,
                                      enum pipe_video_profile profile,
                                      enum pipe_video_entrypoint entrypoint,
                                      enum pipe_video_cap param)
{
   switch (param) {
      case PIPE_VIDEO_CAP_NPOT_TEXTURES:
         return 1;
      case PIPE_VIDEO_CAP_MAX_WIDTH:
      case PIPE_VIDEO_CAP_MAX_HEIGHT:
      case PIPE_VIDEO_CAP_SUPPORTED:
      case PIPE_VIDEO_CAP_PREFERED_FORMAT:
      case PIPE_VIDEO_CAP_SUPPORTS_INTERLACED:
      case PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE:
      case PIPE_VIDEO_SUPPORTS_CONTIGUOUS_PLANES_MAP:
      case PIPE_VIDEO_CAP_VPP_MAX_INPUT_WIDTH:
      case PIPE_VIDEO_CAP_VPP_MAX_INPUT_HEIGHT:
      case PIPE_VIDEO_CAP_VPP_MIN_INPUT_WIDTH:
      case PIPE_VIDEO_CAP_VPP_MIN_INPUT_HEIGHT:
      case PIPE_VIDEO_CAP_VPP_MAX_OUTPUT_WIDTH:
      case PIPE_VIDEO_CAP_VPP_MAX_OUTPUT_HEIGHT:
      case PIPE_VIDEO_CAP_VPP_MIN_OUTPUT_WIDTH:
      case PIPE_VIDEO_CAP_VPP_MIN_OUTPUT_HEIGHT:
      case PIPE_VIDEO_CAP_VPP_ORIENTATION_MODES:
      case PIPE_VIDEO_CAP_VPP_BLEND_MODES:
      {
         // Assume defaults for now; get_video_param does not pass enough input arguments
         // for these values to be more accurate here.
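         // A representative 1280x720 progressive NV12 input/output at 30fps in BT.709 studio
         // range is used; d3d12_has_video_process_support overrides the width/height while
         // probing for the maximum supported resolution.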
         const D3D12_VIDEO_FIELD_TYPE FieldType = D3D12_VIDEO_FIELD_TYPE_NONE;
         const D3D12_VIDEO_FRAME_STEREO_FORMAT StereoFormat = D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE;
         const DXGI_RATIONAL FrameRate = { 30, 1 };
         const DXGI_FORMAT InputFormat = DXGI_FORMAT_NV12;
         const DXGI_COLOR_SPACE_TYPE InputColorSpace = DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709;
         const DXGI_FORMAT OutputFormat = DXGI_FORMAT_NV12;
         const DXGI_COLOR_SPACE_TYPE OutputColorSpace = DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709;
         const UINT Width = 1280;
         const UINT Height = 720;
         D3D12_FEATURE_DATA_VIDEO_PROCESS_SUPPORT supportCaps =
         {
            0, // NodeIndex
            { Width, Height, { InputFormat, InputColorSpace } },
            FieldType,
            StereoFormat,
            FrameRate,
            { OutputFormat, OutputColorSpace },
            StereoFormat,
            FrameRate,
         };

         if (d3d12_has_video_process_support(pscreen, supportCaps)) {
            if (param == PIPE_VIDEO_CAP_SUPPORTED) {
               return true;
            } else if (param == PIPE_VIDEO_CAP_PREFERED_FORMAT) {
               return PIPE_FORMAT_NV12;
            } else if (param == PIPE_VIDEO_CAP_SUPPORTS_INTERLACED) {
               return false;
            } else if (param == PIPE_VIDEO_CAP_MAX_WIDTH) {
               return supportCaps.InputSample.Width;
            } else if (param == PIPE_VIDEO_CAP_MAX_HEIGHT) {
               return supportCaps.InputSample.Height;
            } else if (param == PIPE_VIDEO_SUPPORTS_CONTIGUOUS_PLANES_MAP) {
               return true;
            } else if (param == PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE) {
               return true;
            } else if (param == PIPE_VIDEO_CAP_VPP_MAX_INPUT_WIDTH) {
               return supportCaps.ScaleSupport.OutputSizeRange.MaxWidth;
            } else if (param == PIPE_VIDEO_CAP_VPP_MAX_INPUT_HEIGHT) {
               return supportCaps.ScaleSupport.OutputSizeRange.MaxHeight;
            } else if (param == PIPE_VIDEO_CAP_VPP_MIN_INPUT_WIDTH) {
               return supportCaps.ScaleSupport.OutputSizeRange.MinWidth;
            } else if (param == PIPE_VIDEO_CAP_VPP_MIN_INPUT_HEIGHT) {
               return supportCaps.ScaleSupport.OutputSizeRange.MinHeight;
            } else if (param == PIPE_VIDEO_CAP_VPP_MAX_OUTPUT_WIDTH) {
               return supportCaps.ScaleSupport.OutputSizeRange.MaxWidth;
            } else if (param == PIPE_VIDEO_CAP_VPP_MAX_OUTPUT_HEIGHT) {
               return supportCaps.ScaleSupport.OutputSizeRange.MaxHeight;
            } else if (param == PIPE_VIDEO_CAP_VPP_MIN_OUTPUT_WIDTH) {
               return supportCaps.ScaleSupport.OutputSizeRange.MinWidth;
            } else if (param == PIPE_VIDEO_CAP_VPP_MIN_OUTPUT_HEIGHT) {
               return supportCaps.ScaleSupport.OutputSizeRange.MinHeight;
            } else if (param == PIPE_VIDEO_CAP_VPP_BLEND_MODES) {
               uint32_t blend_modes = PIPE_VIDEO_VPP_BLEND_MODE_NONE;
               if (((supportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_BLENDING) != 0) &&
                   ((supportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ALPHA_FILL) != 0)) {
                  blend_modes |= PIPE_VIDEO_VPP_BLEND_MODE_GLOBAL_ALPHA;
               }
               return blend_modes;
            } else if (param == PIPE_VIDEO_CAP_VPP_ORIENTATION_MODES) {
               uint32_t orientation_modes = PIPE_VIDEO_VPP_ORIENTATION_DEFAULT;
               if ((supportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_FLIP) != 0) {
                  orientation_modes |= PIPE_VIDEO_VPP_FLIP_HORIZONTAL;
                  orientation_modes |= PIPE_VIDEO_VPP_FLIP_VERTICAL;
               }

               if ((supportCaps.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_ROTATION) != 0) {
                  orientation_modes |= PIPE_VIDEO_VPP_ROTATION_90;
                  orientation_modes |= PIPE_VIDEO_VPP_ROTATION_180;
                  orientation_modes |= PIPE_VIDEO_VPP_ROTATION_270;
               }
               return orientation_modes;
            }
         }
         return 0;
      } break;
      default:
         return 0;
   }
}

static int
d3d12_screen_get_video_param_encode(struct pipe_screen *pscreen,
                                    enum pipe_video_profile profile,
                                    enum pipe_video_entrypoint entrypoint,
                                    enum pipe_video_cap param)
{
   uint32_t maxLvlEncode = 0u;
   D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_DESC maxResEncode = {};
   uint32_t maxSlices = 0u;
   uint32_t supportedSliceStructures = 0u;
   uint32_t maxReferencesPerFrame = 0u;
   switch (param) {
      case PIPE_VIDEO_CAP_NPOT_TEXTURES:
         return 1;
      case PIPE_VIDEO_CAP_MAX_WIDTH:
      case PIPE_VIDEO_CAP_MAX_HEIGHT:
      case PIPE_VIDEO_CAP_MAX_LEVEL:
      case PIPE_VIDEO_CAP_SUPPORTED:
      case PIPE_VIDEO_CAP_ENC_MAX_SLICES_PER_FRAME:
      case PIPE_VIDEO_CAP_ENC_SLICES_STRUCTURE:
      case PIPE_VIDEO_CAP_ENC_MAX_REFERENCES_PER_FRAME:
      {
         if (d3d12_has_video_encode_support(pscreen,
                                            profile,
                                            maxLvlEncode,
                                            maxResEncode,
                                            maxSlices,
                                            supportedSliceStructures,
                                            maxReferencesPerFrame)) {
            if (param == PIPE_VIDEO_CAP_MAX_WIDTH) {
               return maxResEncode.Width;
            } else if (param == PIPE_VIDEO_CAP_MAX_HEIGHT) {
               return maxResEncode.Height;
            } else if (param == PIPE_VIDEO_CAP_MAX_LEVEL) {
               return maxLvlEncode;
            } else if (param == PIPE_VIDEO_CAP_SUPPORTED) {
               return 1;
            } else if (param == PIPE_VIDEO_CAP_ENC_MAX_SLICES_PER_FRAME) {
               return maxSlices;
            } else if (param == PIPE_VIDEO_CAP_ENC_SLICES_STRUCTURE) {
               return supportedSliceStructures;
            } else if (param == PIPE_VIDEO_CAP_ENC_MAX_REFERENCES_PER_FRAME) {
               return maxReferencesPerFrame;
            }
         }
         return 0;
      } break;
      case PIPE_VIDEO_CAP_PREFERED_FORMAT:
         return PIPE_FORMAT_NV12;
      case PIPE_VIDEO_CAP_PREFERS_INTERLACED:
         return false;
      case PIPE_VIDEO_CAP_SUPPORTS_INTERLACED:
         return false;
      case PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE:
         return true;
      case PIPE_VIDEO_SUPPORTS_CONTIGUOUS_PLANES_MAP:
         return true;
      default:
         debug_printf("[d3d12_screen_get_video_param] unknown video param: %d\n", param);
         return 0;
   }
}

static int
d3d12_screen_get_video_param(struct pipe_screen *pscreen,
                             enum pipe_video_profile profile,
                             enum pipe_video_entrypoint entrypoint,
                             enum pipe_video_cap param)
{
   if (entrypoint == PIPE_VIDEO_ENTRYPOINT_BITSTREAM) {
      return d3d12_screen_get_video_param_decode(pscreen, profile, entrypoint, param);
   } else if (entrypoint == PIPE_VIDEO_ENTRYPOINT_ENCODE) {
      return d3d12_screen_get_video_param_encode(pscreen, profile, entrypoint, param);
   } else if (entrypoint == PIPE_VIDEO_ENTRYPOINT_PROCESSING) {
      return d3d12_screen_get_video_param_postproc(pscreen, profile, entrypoint, param);
   }
   return 0;
}

void
d3d12_screen_video_init(struct pipe_screen *pscreen)
{
   pscreen->get_video_param = d3d12_screen_get_video_param;
   pscreen->is_video_format_supported = d3d12_video_buffer_is_format_supported;
}