d3d12: Upgrade to use DirectX-Headers 717 SDK from previous 716 version

Reviewed-by: Pohsiang (John) Hsu <pohhsu@microsoft.com>
Reviewed-by: Jesse Natalie <jenatali@microsoft.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/35263>
This commit is contained in:
Sil Vilerino 2025-05-21 09:27:01 -04:00 committed by Marge Bot
parent 775cd85ed3
commit f28417a7bb
19 changed files with 546 additions and 269 deletions

View file

@@ -538,8 +538,13 @@ d3d12_video_encoder_update_picparams_tracking(struct d3d12_video_encoder *pD3D12
struct pipe_video_buffer * srcTexture,
struct pipe_picture_desc * picture)
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA currentPicParams =
d3d12_video_encoder_get_current_picture_param_settings(pD3D12Enc);
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 currentPicParams =
d3d12_video_encoder_get_current_picture_param_settings1(pD3D12Enc);
#else
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA currentPicParams =
d3d12_video_encoder_get_current_picture_param_settings(pD3D12Enc);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
enum pipe_video_format codec = u_reduce_video_profile(pD3D12Enc->base.profile);
bool bUsedAsReference = false;
@@ -547,19 +552,19 @@ d3d12_video_encoder_update_picparams_tracking(struct d3d12_video_encoder *pD3D12
#if VIDEO_CODEC_H264ENC
case PIPE_VIDEO_FORMAT_MPEG4_AVC:
{
d3d12_video_encoder_update_current_frame_pic_params_info_h264(pD3D12Enc, srcTexture, picture, currentPicParams, bUsedAsReference);
d3d12_video_encoder_update_current_frame_pic_params_info_h264(pD3D12Enc, srcTexture, picture, currentPicParams.pH264PicData, bUsedAsReference);
} break;
#endif
#if VIDEO_CODEC_H265ENC
case PIPE_VIDEO_FORMAT_HEVC:
{
d3d12_video_encoder_update_current_frame_pic_params_info_hevc(pD3D12Enc, srcTexture, picture, currentPicParams, bUsedAsReference);
d3d12_video_encoder_update_current_frame_pic_params_info_hevc(pD3D12Enc, srcTexture, picture, currentPicParams.pHEVCPicData, bUsedAsReference);
} break;
#endif
#if VIDEO_CODEC_AV1ENC
case PIPE_VIDEO_FORMAT_AV1:
{
d3d12_video_encoder_update_current_frame_pic_params_info_av1(pD3D12Enc, srcTexture, picture, currentPicParams, bUsedAsReference);
d3d12_video_encoder_update_current_frame_pic_params_info_av1(pD3D12Enc, srcTexture, picture, currentPicParams.pAV1PicData, bUsedAsReference);
} break;
#endif
default:
@@ -775,44 +780,75 @@ d3d12_video_encoder_reconfigure_encoder_objects(struct d3d12_video_encoder *pD3D
reCreatedEncoderHeap = true;
}
HRESULT hr = S_OK;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_HEAP_FLAGS heapFlags = D3D12_VIDEO_ENCODER_HEAP_FLAG_NONE;
if (pD3D12Enc->m_currentEncodeCapabilities.m_currentResolutionSupportCaps.DirtyRegions.DirtyRegionsSupportFlags) {
heapFlags |= D3D12_VIDEO_ENCODER_HEAP_FLAG_ALLOW_DIRTY_REGIONS;
}
//
// Prefer individual slice buffers when possible
//
if (pD3D12Enc->m_currentEncodeCapabilities.m_SupportFlags &
D3D12_VIDEO_ENCODER_SUPPORT_FLAG_SUBREGION_NOTIFICATION_ARRAY_OF_BUFFERS_AVAILABLE)
ComPtr<ID3D12VideoDevice4> spVideoDevice4;
if (SUCCEEDED(pD3D12Enc->m_spD3D12VideoDevice->QueryInterface(
IID_PPV_ARGS(spVideoDevice4.GetAddressOf()))))
{
heapFlags |= D3D12_VIDEO_ENCODER_HEAP_FLAG_ALLOW_SUBREGION_NOTIFICATION_ARRAY_OF_BUFFERS;
D3D12_VIDEO_ENCODER_HEAP_FLAGS heapFlags = D3D12_VIDEO_ENCODER_HEAP_FLAG_NONE;
if (pD3D12Enc->m_currentEncodeCapabilities.m_currentResolutionSupportCaps.DirtyRegions.DirtyRegionsSupportFlags) {
heapFlags |= D3D12_VIDEO_ENCODER_HEAP_FLAG_ALLOW_DIRTY_REGIONS;
}
//
// Prefer individual slice buffers when possible
//
if (pD3D12Enc->m_currentEncodeCapabilities.m_SupportFlags &
D3D12_VIDEO_ENCODER_SUPPORT_FLAG_SUBREGION_NOTIFICATION_ARRAY_OF_BUFFERS_AVAILABLE)
{
heapFlags |= D3D12_VIDEO_ENCODER_HEAP_FLAG_ALLOW_SUBREGION_NOTIFICATION_ARRAY_OF_BUFFERS;
}
else if (pD3D12Enc->m_currentEncodeCapabilities.m_SupportFlags &
D3D12_VIDEO_ENCODER_SUPPORT_FLAG_SUBREGION_NOTIFICATION_SINGLE_BUFFER_AVAILABLE)
{
heapFlags |= D3D12_VIDEO_ENCODER_HEAP_FLAG_ALLOW_SUBREGION_NOTIFICATION_SINGLE_BUFFER;
}
D3D12_VIDEO_ENCODER_HEAP_DESC1 heapDesc1 = {
pD3D12Enc->m_NodeMask,
heapFlags,
pD3D12Enc->m_currentEncodeConfig.m_encoderCodecDesc,
d3d12_video_encoder_get_current_profile_desc(pD3D12Enc),
d3d12_video_encoder_get_current_level_desc(pD3D12Enc),
// resolution list count
1,
// resolution list
&pD3D12Enc->m_currentEncodeConfig.m_currentResolution,
// UINT Pow2DownscaleFactor
0,
};
// Create encoder heap
pD3D12Enc->m_spVideoEncoderHeap.Reset();
ComPtr<ID3D12VideoEncoderHeap1> spVideoEncoderHeap1;
hr = spVideoDevice4->CreateVideoEncoderHeap1(&heapDesc1,
IID_PPV_ARGS(spVideoEncoderHeap1.GetAddressOf()));
if (SUCCEEDED(hr))
{
hr = spVideoEncoderHeap1->QueryInterface(IID_PPV_ARGS(pD3D12Enc->m_spVideoEncoderHeap.GetAddressOf()));
}
}
else if (pD3D12Enc->m_currentEncodeCapabilities.m_SupportFlags &
D3D12_VIDEO_ENCODER_SUPPORT_FLAG_SUBREGION_NOTIFICATION_SINGLE_BUFFER_AVAILABLE)
else
#else // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
{
heapFlags |= D3D12_VIDEO_ENCODER_HEAP_FLAG_ALLOW_SUBREGION_NOTIFICATION_SINGLE_BUFFER;
D3D12_VIDEO_ENCODER_HEAP_DESC heapDesc = { pD3D12Enc->m_NodeMask,
D3D12_VIDEO_ENCODER_HEAP_FLAG_NONE,
pD3D12Enc->m_currentEncodeConfig.m_encoderCodecDesc,
d3d12_video_encoder_get_current_profile_desc(pD3D12Enc),
d3d12_video_encoder_get_current_level_desc(pD3D12Enc),
// resolution list count
1,
// resolution list
&pD3D12Enc->m_currentEncodeConfig.m_currentResolution };
// Create encoder heap
pD3D12Enc->m_spVideoEncoderHeap.Reset();
hr = pD3D12Enc->m_spD3D12VideoDevice->CreateVideoEncoderHeap(&heapDesc,
IID_PPV_ARGS(pD3D12Enc->m_spVideoEncoderHeap.GetAddressOf()));
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_HEAP_DESC heapDesc = { pD3D12Enc->m_NodeMask,
heapFlags,
#else
D3D12_VIDEO_ENCODER_HEAP_DESC heapDesc = { pD3D12Enc->m_NodeMask,
D3D12_VIDEO_ENCODER_HEAP_FLAG_NONE,
#endif
pD3D12Enc->m_currentEncodeConfig.m_encoderCodecDesc,
d3d12_video_encoder_get_current_profile_desc(pD3D12Enc),
d3d12_video_encoder_get_current_level_desc(pD3D12Enc),
// resolution list count
1,
// resolution list
&pD3D12Enc->m_currentEncodeConfig.m_currentResolution };
// Create encoder heap
pD3D12Enc->m_spVideoEncoderHeap.Reset();
HRESULT hr = pD3D12Enc->m_spD3D12VideoDevice->CreateVideoEncoderHeap(&heapDesc,
IID_PPV_ARGS(pD3D12Enc->m_spVideoEncoderHeap.GetAddressOf()));
if (FAILED(hr)) {
debug_printf("CreateVideoEncoderHeap failed with HR %x\n", hr);
return false;
@@ -958,34 +994,32 @@ d3d12_video_encoder_get_current_slice_param_settings(struct d3d12_video_encoder
return subregionData;
}
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA
d3d12_video_encoder_get_current_picture_param_settings(struct d3d12_video_encoder *pD3D12Enc)
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1
d3d12_video_encoder_get_current_picture_param_settings1(struct d3d12_video_encoder *pD3D12Enc)
{
enum pipe_video_format codec = u_reduce_video_profile(pD3D12Enc->base.profile);
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curPicParamsData = {};
switch (codec) {
#if VIDEO_CODEC_H264ENC
case PIPE_VIDEO_FORMAT_MPEG4_AVC:
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curPicParamsData = {};
curPicParamsData.pH264PicData = &pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_H264PicData;
curPicParamsData.DataSize = sizeof(pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_H264PicData);
return curPicParamsData;
} break;
#endif
#if VIDEO_CODEC_H265ENC
case PIPE_VIDEO_FORMAT_HEVC:
{
return ConvertHEVCPicParamsFromProfile(pD3D12Enc->m_currentEncodeConfig.m_encoderProfileDesc.m_HEVCProfile,
&pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_HEVCPicData);
curPicParamsData.pHEVCPicData = &pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_HEVCPicData;
curPicParamsData.DataSize = sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2);
} break;
#endif
#if VIDEO_CODEC_AV1ENC
case PIPE_VIDEO_FORMAT_AV1:
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curPicParamsData = {};
curPicParamsData.pAV1PicData = &pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_AV1PicData;
curPicParamsData.DataSize = sizeof(pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_AV1PicData);
return curPicParamsData;
} break;
#endif
default:
@@ -993,6 +1027,44 @@ d3d12_video_encoder_get_current_picture_param_settings(struct d3d12_video_encode
unreachable("Unsupported pipe_video_format");
} break;
}
return curPicParamsData;
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA
d3d12_video_encoder_get_current_picture_param_settings(struct d3d12_video_encoder *pD3D12Enc)
{
enum pipe_video_format codec = u_reduce_video_profile(pD3D12Enc->base.profile);
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curPicParamsData = {};
switch (codec) {
#if VIDEO_CODEC_H264ENC
case PIPE_VIDEO_FORMAT_MPEG4_AVC:
{
curPicParamsData.pH264PicData = &pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_H264PicData;
curPicParamsData.DataSize = sizeof(pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_H264PicData);
} break;
#endif
#if VIDEO_CODEC_H265ENC
case PIPE_VIDEO_FORMAT_HEVC:
{
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 binary-compatible with D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC
curPicParamsData.pHEVCPicData = reinterpret_cast<D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC*>(&pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_HEVCPicData);
curPicParamsData.DataSize = sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC);
} break;
#endif
#if VIDEO_CODEC_AV1ENC
case PIPE_VIDEO_FORMAT_AV1:
{
curPicParamsData.pAV1PicData = &pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_AV1PicData;
curPicParamsData.DataSize = sizeof(pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_AV1PicData);
} break;
#endif
default:
{
unreachable("Unsupported pipe_video_format");
} break;
}
return curPicParamsData;
}
D3D12_VIDEO_ENCODER_RATE_CONTROL
@@ -3166,6 +3238,18 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
if (SUCCEEDED(pD3D12Enc->m_spEncodeCommandList->QueryInterface(
IID_PPV_ARGS(spEncodeCommandList4.GetAddressOf())))) {
// Update current frame pic params state after reconfiguring above.
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 currentPicParams1 =
d3d12_video_encoder_get_current_picture_param_settings1(pD3D12Enc);
if (!pD3D12Enc->m_upDPBManager->get_current_frame_picture_control_data1(currentPicParams1)) {
debug_printf("[d3d12_video_encoder_encode_bitstream] get_current_frame_picture_control_data1 failed!\n");
pD3D12Enc->m_inflightResourcesPool[d3d12_video_encoder_pool_current_index(pD3D12Enc)].encode_result = PIPE_VIDEO_FEEDBACK_METADATA_ENCODE_FLAG_FAILED;
pD3D12Enc->m_spEncodedFrameMetadata[d3d12_video_encoder_metadata_current_index(pD3D12Enc)].encode_result = PIPE_VIDEO_FEEDBACK_METADATA_ENCODE_FLAG_FAILED;
assert(false);
return;
}
std::vector<D3D12_RESOURCE_BARRIER> pResolveInputDataBarriers;
D3D12_VIDEO_ENCODER_DIRTY_REGIONS dirtyRegions = { };
dirtyRegions.MapSource = pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.MapSource;
@@ -3307,7 +3391,7 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
ResolveInputData.MotionVectors.pMotionVectorMapsSubresources = pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapInfo.pMotionVectorMapsSubresources;
ResolveInputData.MotionVectors.pMotionVectorMapsMetadataSubresources = pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapInfo.pMotionVectorMapsMetadataSubresources;
ResolveInputData.MotionVectors.MotionUnitPrecision = pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapInfo.MotionUnitPrecision;
ResolveInputData.MotionVectors.PictureControlConfiguration = currentPicParams;
ResolveInputData.MotionVectors.PictureControlConfiguration = currentPicParams1;
D3D12_VIDEO_ENCODER_RESOLVE_INPUT_PARAM_LAYOUT_INPUT_ARGUMENTS resolveInputParamLayoutInput =
{
@@ -3377,13 +3461,13 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
pD3D12Enc->m_currentEncodeConfig.m_encoderSliceConfigMode,
d3d12_video_encoder_get_current_slice_param_settings(pD3D12Enc),
d3d12_video_encoder_get_current_gop_desc(pD3D12Enc) },
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_DESC/D3D12_VIDEO_ENCODER_PICTURE_CONTROL_DESC1
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_DESC1
{ // uint32_t IntraRefreshFrameIndex;
pD3D12Enc->m_currentEncodeConfig.m_IntraRefreshCurrentFrameIndex,
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_FLAGS Flags;
picCtrlFlags,
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA PictureControlCodecData;
currentPicParams,
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 PictureControlCodecData;
currentPicParams1,
// D3D12_VIDEO_ENCODE_REFERENCE_FRAMES ReferenceFrames;
referenceFramesDescriptor,
// D3D12_VIDEO_ENCODER_FRAME_MOTION_VECTORS MotionVectors;
@@ -3392,6 +3476,8 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
dirtyRegions,
// D3D12_VIDEO_ENCODER_QUANTIZATION_OPAQUE_MAP QuantizationTextureMap;
QuantizationTextureMap,
// D3D12_VIDEO_ENCODER_FRAME_ANALYSIS FrameAnalysis;
{ },
},
pInputVideoD3D12Res,
inputVideoD3D12Subresource,
@@ -3576,7 +3662,9 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
// D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE
reconPicOutputTextureDesc,
// D3D12_VIDEO_ENCODER_ENCODE_OPERATION_METADATA_BUFFER
{ pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].m_spMetadataOutputBuffer.Get(), 0 }
{ pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].m_spMetadataOutputBuffer.Get(), 0 },
// D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE FrameAnalysisReconstructedPicture;
{},
};
debug_printf("DX12 EncodeFrame submission fenceValue %" PRIu64 " current_metadata_slot %" PRIu64 " - POC %d picture_type %s LayoutMode %d SlicesCount %d IRMode %d IRIndex %d\n",
@@ -3589,9 +3677,13 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
static_cast<uint32_t>(pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].m_associatedEncodeConfig.m_IntraRefresh.Mode),
pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].m_associatedEncodeConfig.m_IntraRefreshCurrentFrameIndex);
ComPtr<ID3D12VideoEncoderHeap1> spVideoEncoderHeap1;
pD3D12Enc->m_spVideoEncoderHeap->QueryInterface(IID_PPV_ARGS(spVideoEncoderHeap1.GetAddressOf()));
// Record EncodeFrame
spEncodeCommandList4->EncodeFrame1(pD3D12Enc->m_spVideoEncoder.Get(),
pD3D12Enc->m_spVideoEncoderHeap.Get(),
spVideoEncoderHeap1.Get(),
&inputStreamArguments,
&outputStreamArguments);
@@ -3661,6 +3753,10 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
d12_gpu_stats_satd_map,
// ID3D12Resource *pOutputBitAllocationMap;
d12_gpu_stats_rc_bitallocation_map,
// D3D12_VIDEO_ENCODER_ENCODE_OPERATION_METADATA_BUFFER ResolvedFramePSNRData;
{},
// D3D12_VIDEO_ENCODER_ENCODE_OPERATION_METADATA_BUFFER ResolvedSubregionsPSNRData;
{},
};
spEncodeCommandList4->ResolveEncoderOutputMetadata1(&inputMetadataCmd, &outputMetadataCmd);
@@ -3728,7 +3824,7 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
pD3D12Enc->m_currentEncodeConfig.m_encoderSliceConfigMode,
d3d12_video_encoder_get_current_slice_param_settings(pD3D12Enc),
d3d12_video_encoder_get_current_gop_desc(pD3D12Enc) },
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_DESC/D3D12_VIDEO_ENCODER_PICTURE_CONTROL_DESC1
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_DESC
{ // uint32_t IntraRefreshFrameIndex;
pD3D12Enc->m_currentEncodeConfig.m_IntraRefreshCurrentFrameIndex,
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_FLAGS Flags;

View file

@@ -296,7 +296,11 @@ struct D3D12EncodeConfiguration
union
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_H264 m_H264PicData;
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 m_HEVCPicData;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 m_HEVCPicData;
#else
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC m_HEVCPicData;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_CODEC_DATA m_AV1PicData;
} m_encoderPicParamsDesc = {};
@@ -364,7 +368,7 @@ struct D3D12EncodeConfiguration
std::vector<ID3D12Resource*> ppMotionVectorMapsMetadata;
UINT* pMotionVectorMapsMetadataSubresources;
D3D12_VIDEO_ENCODER_FRAME_INPUT_MOTION_UNIT_PRECISION MotionUnitPrecision;
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA PictureControlConfiguration;
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 PictureControlConfiguration;
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLVE_INPUT_PARAM_LAYOUT capInputLayoutMotionVectors;
} MapInfo;
};
@@ -536,7 +540,7 @@ struct d3d12_video_encoder
// we need to keep a reference alive to the ones that
// are currently in-flight
ComPtr<ID3D12VideoEncoder> m_spEncoder;
ComPtr<ID3D12VideoEncoderHeap> m_spEncoderHeap;
ComPtr<ID3D12VideoEncoderHeap> m_spEncoderHeap;
std::shared_ptr<d3d12_video_dpb_storage_manager_interface> m_References;
ComPtr<ID3D12CommandAllocator> m_spCommandAllocator;
@@ -583,6 +587,10 @@ d3d12_video_encoder_reconfigure_encoder_objects(struct d3d12_video_encoder *pD3D
struct pipe_picture_desc * picture);
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA
d3d12_video_encoder_get_current_picture_param_settings(struct d3d12_video_encoder *pD3D12Enc);
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1
d3d12_video_encoder_get_current_picture_param_settings1(struct d3d12_video_encoder *pD3D12Enc);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_LEVEL_SETTING
d3d12_video_encoder_get_current_level_desc(struct d3d12_video_encoder *pD3D12Enc);
D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION

View file

@@ -1215,7 +1215,7 @@ void
d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_encoder *pD3D12Enc,
struct pipe_video_buffer *srcTexture,
struct pipe_picture_desc *picture,
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &picParams,
D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_CODEC_DATA *pAV1PicData,
bool &bUsedAsReference)
{
struct pipe_av1_enc_picture_desc *pAV1Pic = (struct pipe_av1_enc_picture_desc *) picture;
@@ -1225,44 +1225,44 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
bUsedAsReference = pD3D12Enc->m_currentEncodeConfig.m_bUsedAsReference;
// D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAGS Flags;
picParams.pAV1PicData->Flags = D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_NONE;
pAV1PicData->Flags = D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_NONE;
if (pAV1Pic->error_resilient_mode)
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_ERROR_RESILIENT_MODE;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_ERROR_RESILIENT_MODE;
if (pAV1Pic->disable_cdf_update)
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_DISABLE_CDF_UPDATE;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_DISABLE_CDF_UPDATE;
if (pAV1Pic->palette_mode_enable)
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_PALETTE_ENCODING;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_PALETTE_ENCODING;
// Override if required feature
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.RequiredFeatureFlags &
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_PALETTE_ENCODING) != 0) {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Overriding required feature "
"D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_PALETTE_ENCODING\n");
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_PALETTE_ENCODING;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_PALETTE_ENCODING;
}
if (pAV1Pic->skip_mode_present)
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_SKIP_MODE;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_SKIP_MODE;
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.RequiredFeatureFlags &
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_SKIP_MODE_PRESENT) != 0) {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Overriding required feature "
"D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_SKIP_MODE\n");
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_SKIP_MODE;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_SKIP_MODE;
}
if (pAV1Pic->use_ref_frame_mvs)
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_FRAME_REFERENCE_MOTION_VECTORS;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_FRAME_REFERENCE_MOTION_VECTORS;
// Override if required feature
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.RequiredFeatureFlags &
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_FRAME_REFERENCE_MOTION_VECTORS) != 0) {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Overriding required feature "
"D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_FRAME_REFERENCE_MOTION_VECTORS\n");
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_FRAME_REFERENCE_MOTION_VECTORS;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_FRAME_REFERENCE_MOTION_VECTORS;
}
// No pipe flag for force_integer_mv (D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_FORCE_INTEGER_MOTION_VECTORS)
@@ -1271,25 +1271,25 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_FORCED_INTEGER_MOTION_VECTORS) != 0) {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Overriding required feature "
"D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_FORCE_INTEGER_MOTION_VECTORS\n");
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_FORCE_INTEGER_MOTION_VECTORS;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_FORCE_INTEGER_MOTION_VECTORS;
}
if (pAV1Pic->allow_intrabc)
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ALLOW_INTRA_BLOCK_COPY;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ALLOW_INTRA_BLOCK_COPY;
// Override if required feature
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.RequiredFeatureFlags &
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_INTRA_BLOCK_COPY) != 0) {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Overriding required feature "
"D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ALLOW_INTRA_BLOCK_COPY\n");
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ALLOW_INTRA_BLOCK_COPY;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ALLOW_INTRA_BLOCK_COPY;
}
if (pAV1Pic->use_superres)
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_USE_SUPER_RESOLUTION;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_USE_SUPER_RESOLUTION;
if (pAV1Pic->disable_frame_end_update_cdf)
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_DISABLE_FRAME_END_UPDATE_CDF;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_DISABLE_FRAME_END_UPDATE_CDF;
// No pipe flag for D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_FRAME_SEGMENTATION_AUTO
// choose default based on required/supported underlying codec flags
@@ -1297,7 +1297,7 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_AUTO_SEGMENTATION) != 0) {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Overriding required feature "
"D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_FRAME_SEGMENTATION_AUTO\n");
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_FRAME_SEGMENTATION_AUTO;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_FRAME_SEGMENTATION_AUTO;
}
// No pipe flag for D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_FRAME_SEGMENTATION_CUSTOM
@@ -1306,7 +1306,7 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_CUSTOM_SEGMENTATION) != 0) {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Overriding required feature "
"D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_FRAME_SEGMENTATION_CUSTOM\n");
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_FRAME_SEGMENTATION_CUSTOM;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_FRAME_SEGMENTATION_CUSTOM;
assert(false); // Not implemented
}
@@ -1316,14 +1316,14 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_WARPED_MOTION) != 0) {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Overriding required feature "
"D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_WARPED_MOTION\n");
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_WARPED_MOTION;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ENABLE_WARPED_MOTION;
}
// Only enable if supported (there is no PIPE/VA cap flag for reduced_tx_set)
if ((pAV1Pic->reduced_tx_set) &&
(pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.SupportedFeatureFlags &
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_REDUCED_TX_SET) != 0) {
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_REDUCED_TX_SET;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_REDUCED_TX_SET;
}
// Override if required feature
@@ -1331,14 +1331,14 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_REDUCED_TX_SET) != 0) {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Overriding required feature "
"D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_REDUCED_TX_SET\n");
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_REDUCED_TX_SET;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_REDUCED_TX_SET;
}
// Only enable if supported
if ((pAV1Pic->allow_high_precision_mv) &&
(pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.SupportedFeatureFlags &
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_ALLOW_HIGH_PRECISION_MV) != 0) {
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ALLOW_HIGH_PRECISION_MV;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ALLOW_HIGH_PRECISION_MV;
}
// Override if required feature
@@ -1346,7 +1346,7 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_ALLOW_HIGH_PRECISION_MV) != 0) {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Overriding required feature "
"D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ALLOW_HIGH_PRECISION_MV\n");
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ALLOW_HIGH_PRECISION_MV;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_ALLOW_HIGH_PRECISION_MV;
}
// No pipe flag for is_motion_mode_switchable (D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_MOTION_MODE_SWITCHABLE)
@@ -1355,44 +1355,44 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
D3D12_VIDEO_ENCODER_AV1_FEATURE_FLAG_MOTION_MODE_SWITCHABLE) != 0) {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Overriding required feature "
"D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_MOTION_MODE_SWITCHABLE\n");
picParams.pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_MOTION_MODE_SWITCHABLE;
pAV1PicData->Flags |= D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_FLAG_MOTION_MODE_SWITCHABLE;
}
// D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE FrameType;
// AV1 spec matches w/D3D12 enum definition
picParams.pAV1PicData->FrameType = static_cast<D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE>(pAV1Pic->frame_type);
pAV1PicData->FrameType = static_cast<D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE>(pAV1Pic->frame_type);
if (picParams.pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_KEY_FRAME)
if (pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_KEY_FRAME)
debug_printf("Encoding FrameType: D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_KEY_FRAME\n");
if (picParams.pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_INTER_FRAME)
if (pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_INTER_FRAME)
debug_printf("Encoding FrameType: D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_INTER_FRAME\n");
if (picParams.pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_INTRA_ONLY_FRAME)
if (pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_INTRA_ONLY_FRAME)
debug_printf("Encoding FrameType: D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_INTRA_ONLY_FRAME\n");
if (picParams.pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_SWITCH_FRAME)
if (pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_SWITCH_FRAME)
debug_printf("Encoding FrameType: D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_SWITCH_FRAME\n");
// D3D12_VIDEO_ENCODER_AV1_COMP_PREDICTION_TYPE CompoundPredictionType;
picParams.pAV1PicData->CompoundPredictionType = (pAV1Pic->compound_reference_mode == 0) ?
pAV1PicData->CompoundPredictionType = (pAV1Pic->compound_reference_mode == 0) ?
D3D12_VIDEO_ENCODER_AV1_COMP_PREDICTION_TYPE_SINGLE_REFERENCE :
D3D12_VIDEO_ENCODER_AV1_COMP_PREDICTION_TYPE_COMPOUND_REFERENCE;
// D3D12_VIDEO_ENCODER_AV1_INTERPOLATION_FILTERS InterpolationFilter;
// AV1 spec matches w/D3D12 enum definition
picParams.pAV1PicData->InterpolationFilter =
pAV1PicData->InterpolationFilter =
static_cast<D3D12_VIDEO_ENCODER_AV1_INTERPOLATION_FILTERS>(pAV1Pic->interpolation_filter);
// Workaround for apps sending interpolation_filter values not supported even when reporting
// them in pipe_av1_enc_cap_features_ext1.interpolation_filter. If D3D12 driver doesn't support
// requested InterpolationFilter, fallback to the first supported by D3D12 driver
if ( (pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.SupportedInterpolationFilters &
(1 << picParams.pAV1PicData->InterpolationFilter)) == 0 ) { /* See definition of D3D12_VIDEO_ENCODER_AV1_INTERPOLATION_FILTERS_FLAGS */
(1 << pAV1PicData->InterpolationFilter)) == 0 ) { /* See definition of D3D12_VIDEO_ENCODER_AV1_INTERPOLATION_FILTERS_FLAGS */
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Requested interpolation_filter"
" not supported in pipe_av1_enc_cap_features_ext1.interpolation_filter"
", auto selecting from D3D12 SupportedInterpolationFilters...");
for(uint8_t i = D3D12_VIDEO_ENCODER_AV1_INTERPOLATION_FILTERS_EIGHTTAP; i <= D3D12_VIDEO_ENCODER_AV1_INTERPOLATION_FILTERS_SWITCHABLE; i++) {
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.SupportedInterpolationFilters &
(1 << i)) /* See definition of D3D12_VIDEO_ENCODER_AV1_INTERPOLATION_FILTERS_FLAGS */ != 0) {
picParams.pAV1PicData->InterpolationFilter = static_cast<D3D12_VIDEO_ENCODER_AV1_INTERPOLATION_FILTERS>(i);
pAV1PicData->InterpolationFilter = static_cast<D3D12_VIDEO_ENCODER_AV1_INTERPOLATION_FILTERS>(i);
break;
}
}
@@ -1401,136 +1401,136 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
// D3D12_VIDEO_ENCODER_AV1_RESTORATION_CONFIG FrameRestorationConfig;
// AV1 spec matches w/D3D12 FrameRestorationType enum definition
picParams.pAV1PicData->FrameRestorationConfig.FrameRestorationType[0] =
pAV1PicData->FrameRestorationConfig.FrameRestorationType[0] =
static_cast<D3D12_VIDEO_ENCODER_AV1_RESTORATION_TYPE>(pAV1Pic->restoration.yframe_restoration_type);
picParams.pAV1PicData->FrameRestorationConfig.FrameRestorationType[1] =
pAV1PicData->FrameRestorationConfig.FrameRestorationType[1] =
static_cast<D3D12_VIDEO_ENCODER_AV1_RESTORATION_TYPE>(pAV1Pic->restoration.cbframe_restoration_type);
picParams.pAV1PicData->FrameRestorationConfig.FrameRestorationType[2] =
pAV1PicData->FrameRestorationConfig.FrameRestorationType[2] =
static_cast<D3D12_VIDEO_ENCODER_AV1_RESTORATION_TYPE>(pAV1Pic->restoration.crframe_restoration_type);
if (picParams.pAV1PicData->FrameRestorationConfig.FrameRestorationType[0] !=
if (pAV1PicData->FrameRestorationConfig.FrameRestorationType[0] !=
D3D12_VIDEO_ENCODER_AV1_RESTORATION_TYPE_DISABLED) {
picParams.pAV1PicData->FrameRestorationConfig.LoopRestorationPixelSize[0] =
pAV1PicData->FrameRestorationConfig.LoopRestorationPixelSize[0] =
d3d12_video_encoder_looprestorationsize_uint_to_d3d12_av1(1 << (6 + pAV1Pic->restoration.lr_unit_shift));
}
if (picParams.pAV1PicData->FrameRestorationConfig.FrameRestorationType[1] !=
if (pAV1PicData->FrameRestorationConfig.FrameRestorationType[1] !=
D3D12_VIDEO_ENCODER_AV1_RESTORATION_TYPE_DISABLED) {
picParams.pAV1PicData->FrameRestorationConfig.LoopRestorationPixelSize[1] =
pAV1PicData->FrameRestorationConfig.LoopRestorationPixelSize[1] =
d3d12_video_encoder_looprestorationsize_uint_to_d3d12_av1(
1 << (6 + pAV1Pic->restoration.lr_unit_shift - pAV1Pic->restoration.lr_uv_shift));
}
if (picParams.pAV1PicData->FrameRestorationConfig.FrameRestorationType[2] !=
if (pAV1PicData->FrameRestorationConfig.FrameRestorationType[2] !=
D3D12_VIDEO_ENCODER_AV1_RESTORATION_TYPE_DISABLED) {
picParams.pAV1PicData->FrameRestorationConfig.LoopRestorationPixelSize[2] =
pAV1PicData->FrameRestorationConfig.LoopRestorationPixelSize[2] =
d3d12_video_encoder_looprestorationsize_uint_to_d3d12_av1(
1 << (6 + pAV1Pic->restoration.lr_unit_shift - pAV1Pic->restoration.lr_uv_shift));
}
// D3D12_VIDEO_ENCODER_AV1_TX_MODE TxMode;
// AV1 spec matches w/D3D12 enum definition
picParams.pAV1PicData->TxMode = static_cast<D3D12_VIDEO_ENCODER_AV1_TX_MODE>(pAV1Pic->tx_mode);
pAV1PicData->TxMode = static_cast<D3D12_VIDEO_ENCODER_AV1_TX_MODE>(pAV1Pic->tx_mode);
// Workaround for mismatch between VAAPI/D3D12 and TxMode support for all/some frame types
// If D3D12 driver doesn't support requested TxMode, fallback to the first supported by D3D12
// driver for the requested frame type
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.SupportedTxModes[picParams.pAV1PicData->FrameType] &
(1 << picParams.pAV1PicData->TxMode)) == 0) /* See definition of D3D12_VIDEO_ENCODER_AV1_TX_MODE_FLAGS */ {
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.SupportedTxModes[pAV1PicData->FrameType] &
(1 << pAV1PicData->TxMode)) == 0) /* See definition of D3D12_VIDEO_ENCODER_AV1_TX_MODE_FLAGS */ {
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Requested tx_mode not supported"
", auto selecting from D3D12 SupportedTxModes for current frame type...");
for(uint8_t i = D3D12_VIDEO_ENCODER_AV1_TX_MODE_ONLY4x4; i <= D3D12_VIDEO_ENCODER_AV1_TX_MODE_SELECT; i++) {
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.SupportedTxModes[picParams.pAV1PicData->FrameType] &
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_AV1CodecCaps.SupportedTxModes[pAV1PicData->FrameType] &
(1 << i)) /* See definition of D3D12_VIDEO_ENCODER_AV1_TX_MODE_FLAGS */ != 0) {
picParams.pAV1PicData->TxMode = static_cast<D3D12_VIDEO_ENCODER_AV1_TX_MODE>(i);
pAV1PicData->TxMode = static_cast<D3D12_VIDEO_ENCODER_AV1_TX_MODE>(i);
break;
}
}
}
// UINT SuperResDenominator;
picParams.pAV1PicData->SuperResDenominator = pAV1Pic->superres_scale_denominator;
pAV1PicData->SuperResDenominator = pAV1Pic->superres_scale_denominator;
// UINT OrderHint;
picParams.pAV1PicData->OrderHint = pAV1Pic->order_hint;
pAV1PicData->OrderHint = pAV1Pic->order_hint;
// UINT PictureIndex - Substract the last_key_frame_num to make it modulo KEY frame
picParams.pAV1PicData->PictureIndex = pAV1Pic->frame_num - pAV1Pic->last_key_frame_num;
pAV1PicData->PictureIndex = pAV1Pic->frame_num - pAV1Pic->last_key_frame_num;
// UINT TemporalLayerIndexPlus1;
assert(pAV1Pic->temporal_id == pAV1Pic->tg_obu_header.temporal_id);
picParams.pAV1PicData->TemporalLayerIndexPlus1 = pAV1Pic->temporal_id + 1;
pAV1PicData->TemporalLayerIndexPlus1 = pAV1Pic->temporal_id + 1;
// UINT SpatialLayerIndexPlus1;
picParams.pAV1PicData->SpatialLayerIndexPlus1 = pAV1Pic->tg_obu_header.spatial_id + 1;
pAV1PicData->SpatialLayerIndexPlus1 = pAV1Pic->tg_obu_header.spatial_id + 1;
//
// Reference Pictures
//
{
for (uint8_t i = 0; i < ARRAY_SIZE(picParams.pAV1PicData->ReferenceIndices); i++) {
picParams.pAV1PicData->ReferenceIndices[i] = pAV1Pic->ref_frame_idx[i];
for (uint8_t i = 0; i < ARRAY_SIZE(pAV1PicData->ReferenceIndices); i++) {
pAV1PicData->ReferenceIndices[i] = pAV1Pic->ref_frame_idx[i];
}
bool FrameIsIntra = (picParams.pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_INTRA_ONLY_FRAME ||
picParams.pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_KEY_FRAME);
bool FrameIsIntra = (pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_INTRA_ONLY_FRAME ||
pAV1PicData->FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_KEY_FRAME);
if (FrameIsIntra)
picParams.pAV1PicData->PrimaryRefFrame = 7; /* PRIMARY_REF_NONE */
pAV1PicData->PrimaryRefFrame = 7; /* PRIMARY_REF_NONE */
else
picParams.pAV1PicData->PrimaryRefFrame = pAV1Pic->primary_ref_frame;
pAV1PicData->PrimaryRefFrame = pAV1Pic->primary_ref_frame;
debug_printf("App requested primary_ref_frame: %" PRIu32 "\n", pAV1Pic->primary_ref_frame);
picParams.pAV1PicData->RefreshFrameFlags = pAV1Pic->refresh_frame_flags;
pAV1PicData->RefreshFrameFlags = pAV1Pic->refresh_frame_flags;
}
// D3D12_VIDEO_ENCODER_CODEC_AV1_LOOP_FILTER_CONFIG LoopFilter;
picParams.pAV1PicData->LoopFilter.LoopFilterLevel[0] = pAV1Pic->loop_filter.filter_level[0];
picParams.pAV1PicData->LoopFilter.LoopFilterLevel[1] = pAV1Pic->loop_filter.filter_level[1];
picParams.pAV1PicData->LoopFilter.LoopFilterLevelU = pAV1Pic->loop_filter.filter_level_u;
picParams.pAV1PicData->LoopFilter.LoopFilterLevelV = pAV1Pic->loop_filter.filter_level_v;
picParams.pAV1PicData->LoopFilter.LoopFilterSharpnessLevel = pAV1Pic->loop_filter.sharpness_level;
picParams.pAV1PicData->LoopFilter.LoopFilterDeltaEnabled = pAV1Pic->loop_filter.mode_ref_delta_enabled;
picParams.pAV1PicData->LoopFilter.UpdateRefDelta = pAV1Pic->loop_filter.mode_ref_delta_update;
if (picParams.pAV1PicData->LoopFilter.UpdateRefDelta) {
for (uint8_t i = 0; i < ARRAY_SIZE(picParams.pAV1PicData->LoopFilter.RefDeltas); i++) {
picParams.pAV1PicData->LoopFilter.RefDeltas[i] = pAV1Pic->loop_filter.ref_deltas[i];
pAV1PicData->LoopFilter.LoopFilterLevel[0] = pAV1Pic->loop_filter.filter_level[0];
pAV1PicData->LoopFilter.LoopFilterLevel[1] = pAV1Pic->loop_filter.filter_level[1];
pAV1PicData->LoopFilter.LoopFilterLevelU = pAV1Pic->loop_filter.filter_level_u;
pAV1PicData->LoopFilter.LoopFilterLevelV = pAV1Pic->loop_filter.filter_level_v;
pAV1PicData->LoopFilter.LoopFilterSharpnessLevel = pAV1Pic->loop_filter.sharpness_level;
pAV1PicData->LoopFilter.LoopFilterDeltaEnabled = pAV1Pic->loop_filter.mode_ref_delta_enabled;
pAV1PicData->LoopFilter.UpdateRefDelta = pAV1Pic->loop_filter.mode_ref_delta_update;
if (pAV1PicData->LoopFilter.UpdateRefDelta) {
for (uint8_t i = 0; i < ARRAY_SIZE(pAV1PicData->LoopFilter.RefDeltas); i++) {
pAV1PicData->LoopFilter.RefDeltas[i] = pAV1Pic->loop_filter.ref_deltas[i];
}
}
picParams.pAV1PicData->LoopFilter.UpdateModeDelta = pAV1Pic->loop_filter.mode_ref_delta_update;
if (picParams.pAV1PicData->LoopFilter.UpdateModeDelta) {
for (uint8_t i = 0; i < ARRAY_SIZE(picParams.pAV1PicData->LoopFilter.ModeDeltas); i++) {
picParams.pAV1PicData->LoopFilter.ModeDeltas[i] = pAV1Pic->loop_filter.mode_deltas[i];
pAV1PicData->LoopFilter.UpdateModeDelta = pAV1Pic->loop_filter.mode_ref_delta_update;
if (pAV1PicData->LoopFilter.UpdateModeDelta) {
for (uint8_t i = 0; i < ARRAY_SIZE(pAV1PicData->LoopFilter.ModeDeltas); i++) {
pAV1PicData->LoopFilter.ModeDeltas[i] = pAV1Pic->loop_filter.mode_deltas[i];
}
}
// D3D12_VIDEO_ENCODER_CODEC_AV1_LOOP_FILTER_DELTA_CONFIG LoopFilterDelta;
picParams.pAV1PicData->LoopFilterDelta.DeltaLFMulti = pAV1Pic->loop_filter.delta_lf_multi;
picParams.pAV1PicData->LoopFilterDelta.DeltaLFPresent = pAV1Pic->loop_filter.delta_lf_present;
picParams.pAV1PicData->LoopFilterDelta.DeltaLFRes = pAV1Pic->loop_filter.delta_lf_res;
pAV1PicData->LoopFilterDelta.DeltaLFMulti = pAV1Pic->loop_filter.delta_lf_multi;
pAV1PicData->LoopFilterDelta.DeltaLFPresent = pAV1Pic->loop_filter.delta_lf_present;
pAV1PicData->LoopFilterDelta.DeltaLFRes = pAV1Pic->loop_filter.delta_lf_res;
// D3D12_VIDEO_ENCODER_CODEC_AV1_QUANTIZATION_CONFIG Quantization;
picParams.pAV1PicData->Quantization.BaseQIndex = pAV1Pic->quantization.base_qindex;
picParams.pAV1PicData->Quantization.YDCDeltaQ = pAV1Pic->quantization.y_dc_delta_q;
picParams.pAV1PicData->Quantization.UDCDeltaQ = pAV1Pic->quantization.u_dc_delta_q;
picParams.pAV1PicData->Quantization.UACDeltaQ = pAV1Pic->quantization.u_ac_delta_q;
picParams.pAV1PicData->Quantization.VDCDeltaQ = pAV1Pic->quantization.v_dc_delta_q;
picParams.pAV1PicData->Quantization.VACDeltaQ = pAV1Pic->quantization.v_ac_delta_q;
picParams.pAV1PicData->Quantization.UsingQMatrix = pAV1Pic->quantization.using_qmatrix;
picParams.pAV1PicData->Quantization.QMY = pAV1Pic->quantization.qm_y;
picParams.pAV1PicData->Quantization.QMU = pAV1Pic->quantization.qm_u;
picParams.pAV1PicData->Quantization.QMV = pAV1Pic->quantization.qm_v;
pAV1PicData->Quantization.BaseQIndex = pAV1Pic->quantization.base_qindex;
pAV1PicData->Quantization.YDCDeltaQ = pAV1Pic->quantization.y_dc_delta_q;
pAV1PicData->Quantization.UDCDeltaQ = pAV1Pic->quantization.u_dc_delta_q;
pAV1PicData->Quantization.UACDeltaQ = pAV1Pic->quantization.u_ac_delta_q;
pAV1PicData->Quantization.VDCDeltaQ = pAV1Pic->quantization.v_dc_delta_q;
pAV1PicData->Quantization.VACDeltaQ = pAV1Pic->quantization.v_ac_delta_q;
pAV1PicData->Quantization.UsingQMatrix = pAV1Pic->quantization.using_qmatrix;
pAV1PicData->Quantization.QMY = pAV1Pic->quantization.qm_y;
pAV1PicData->Quantization.QMU = pAV1Pic->quantization.qm_u;
pAV1PicData->Quantization.QMV = pAV1Pic->quantization.qm_v;
// D3D12_VIDEO_ENCODER_CODEC_AV1_QUANTIZATION_DELTA_CONFIG QuantizationDelta;
picParams.pAV1PicData->QuantizationDelta.DeltaQPresent = pAV1Pic->quantization.delta_q_present;
picParams.pAV1PicData->QuantizationDelta.DeltaQRes = pAV1Pic->quantization.delta_q_res;
pAV1PicData->QuantizationDelta.DeltaQPresent = pAV1Pic->quantization.delta_q_present;
pAV1PicData->QuantizationDelta.DeltaQRes = pAV1Pic->quantization.delta_q_res;
// D3D12_VIDEO_ENCODER_AV1_CDEF_CONFIG CDEF;
picParams.pAV1PicData->CDEF.CdefBits = pAV1Pic->cdef.cdef_bits;
picParams.pAV1PicData->CDEF.CdefDampingMinus3 = pAV1Pic->cdef.cdef_damping_minus_3;
pAV1PicData->CDEF.CdefBits = pAV1Pic->cdef.cdef_bits;
pAV1PicData->CDEF.CdefDampingMinus3 = pAV1Pic->cdef.cdef_damping_minus_3;
for (uint32_t i = 0; i < 8; i++) {
picParams.pAV1PicData->CDEF.CdefYPriStrength[i] = (pAV1Pic->cdef.cdef_y_strengths[i] >> 2);
picParams.pAV1PicData->CDEF.CdefYSecStrength[i] = (pAV1Pic->cdef.cdef_y_strengths[i] & 0x03);
picParams.pAV1PicData->CDEF.CdefUVPriStrength[i] = (pAV1Pic->cdef.cdef_uv_strengths[i] >> 2);
picParams.pAV1PicData->CDEF.CdefUVSecStrength[i] = (pAV1Pic->cdef.cdef_uv_strengths[i] & 0x03);
pAV1PicData->CDEF.CdefYPriStrength[i] = (pAV1Pic->cdef.cdef_y_strengths[i] >> 2);
pAV1PicData->CDEF.CdefYSecStrength[i] = (pAV1Pic->cdef.cdef_y_strengths[i] & 0x03);
pAV1PicData->CDEF.CdefUVPriStrength[i] = (pAV1Pic->cdef.cdef_uv_strengths[i] >> 2);
pAV1PicData->CDEF.CdefUVSecStrength[i] = (pAV1Pic->cdef.cdef_uv_strengths[i] & 0x03);
}
//
@ -1601,8 +1601,8 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
D3D12_VIDEO_ENCODER_AV1_RESTORATION_TYPE curFilter =
static_cast<D3D12_VIDEO_ENCODER_AV1_RESTORATION_TYPE>(filterIdx + 1);
picParams.pAV1PicData->FrameRestorationConfig.FrameRestorationType[planeIdx] = curFilter;
picParams.pAV1PicData->FrameRestorationConfig.LoopRestorationPixelSize[planeIdx] =
pAV1PicData->FrameRestorationConfig.FrameRestorationType[planeIdx] = curFilter;
pAV1PicData->FrameRestorationConfig.LoopRestorationPixelSize[planeIdx] =
static_cast<D3D12_VIDEO_ENCODER_AV1_RESTORATION_TILESIZE>(
curFilterSize); /* loop uses enum type */
}
@ -1612,6 +1612,9 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
}
}
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA picParams = {};
picParams.pAV1PicData = pAV1PicData;
picParams.DataSize = sizeof(*pAV1PicData);
pD3D12Enc->m_upDPBManager->begin_frame(picParams, bUsedAsReference, picture);
pD3D12Enc->m_upDPBManager->get_current_frame_picture_control_data(picParams);
@ -1642,8 +1645,8 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
av1_min_delta_qp,
av1_max_delta_qp,
pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap16Bit);
picParams.pAV1PicData->pRateControlQPMap = pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap16Bit.data();
picParams.pAV1PicData->QPMapValuesCount = static_cast<UINT>(pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap16Bit.size());
pAV1PicData->pRateControlQPMap = pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap16Bit.data();
pAV1PicData->QPMapValuesCount = static_cast<UINT>(pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap16Bit.size());
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
}

View file

@ -40,7 +40,7 @@ void
d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_encoder *pD3D12Enc,
struct pipe_video_buffer *srcTexture,
struct pipe_picture_desc *picture,
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &picParams,
D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_CODEC_DATA* pAV1PicData,
bool &bUsedAsReference);
unsigned

View file

@ -346,7 +346,7 @@ void
d3d12_video_encoder_update_current_frame_pic_params_info_h264(struct d3d12_video_encoder *pD3D12Enc,
struct pipe_video_buffer *srcTexture,
struct pipe_picture_desc *picture,
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &picParams,
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_H264* pH264PicData,
bool &bUsedAsReference)
{
struct pipe_h264_enc_picture_desc *h264Pic = (struct pipe_h264_enc_picture_desc *) picture;
@ -360,22 +360,22 @@ d3d12_video_encoder_update_current_frame_pic_params_info_h264(struct d3d12_video
if (pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_H264CodecCaps.SupportFlags &
D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_SUPPORT_H264_FLAG_NUM_REF_IDX_ACTIVE_OVERRIDE_FLAG_SLICE_SUPPORT)
{
picParams.pH264PicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_H264_FLAG_REQUEST_NUM_REF_IDX_ACTIVE_OVERRIDE_FLAG_SLICE;
pH264PicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_H264_FLAG_REQUEST_NUM_REF_IDX_ACTIVE_OVERRIDE_FLAG_SLICE;
}
//
// These need to be set here so they're available for SPS/PPS header building (reference manager updates after that, for slice header params)
//
picParams.pH264PicData->TemporalLayerIndex = h264Pic->pic_ctrl.temporal_id;
picParams.pH264PicData->pic_parameter_set_id = pH264BitstreamBuilder->get_active_pps().pic_parameter_set_id;
picParams.pH264PicData->List0ReferenceFramesCount = 0;
picParams.pH264PicData->List1ReferenceFramesCount = 0;
pH264PicData->TemporalLayerIndex = h264Pic->pic_ctrl.temporal_id;
pH264PicData->pic_parameter_set_id = pH264BitstreamBuilder->get_active_pps().pic_parameter_set_id;
pH264PicData->List0ReferenceFramesCount = 0;
pH264PicData->List1ReferenceFramesCount = 0;
if ((h264Pic->picture_type == PIPE_H2645_ENC_PICTURE_TYPE_P) ||
(h264Pic->picture_type == PIPE_H2645_ENC_PICTURE_TYPE_B))
picParams.pH264PicData->List0ReferenceFramesCount = h264Pic->num_ref_idx_l0_active_minus1 + 1;
pH264PicData->List0ReferenceFramesCount = h264Pic->num_ref_idx_l0_active_minus1 + 1;
if (h264Pic->picture_type == PIPE_H2645_ENC_PICTURE_TYPE_B)
picParams.pH264PicData->List1ReferenceFramesCount = h264Pic->num_ref_idx_l1_active_minus1 + 1;
pH264PicData->List1ReferenceFramesCount = h264Pic->num_ref_idx_l1_active_minus1 + 1;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
if (pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.AppRequested)
@ -389,11 +389,14 @@ d3d12_video_encoder_update_current_frame_pic_params_info_h264(struct d3d12_video
h264_min_delta_qp,
h264_max_delta_qp,
pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap8Bit);
picParams.pH264PicData->pRateControlQPMap = pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap8Bit.data();
picParams.pH264PicData->QPMapValuesCount = static_cast<UINT>(pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap8Bit.size());
pH264PicData->pRateControlQPMap = pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap8Bit.data();
pH264PicData->QPMapValuesCount = static_cast<UINT>(pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap8Bit.size());
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA picParams = {};
picParams.pH264PicData = pH264PicData;
picParams.DataSize = sizeof(*pH264PicData);
pD3D12Enc->m_upDPBManager->begin_frame(picParams, bUsedAsReference, picture);
pD3D12Enc->m_upDPBManager->get_current_frame_picture_control_data(picParams);

View file

@ -52,7 +52,7 @@ void
d3d12_video_encoder_update_current_frame_pic_params_info_h264(struct d3d12_video_encoder *pD3D12Enc,
struct pipe_video_buffer * srcTexture,
struct pipe_picture_desc * picture,
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &picParams,
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_H264* pH264PicData,
bool &bUsedAsReference);
uint32_t
d3d12_video_encoder_build_codec_headers_h264(struct d3d12_video_encoder *pD3D12Enc,

View file

@ -345,7 +345,11 @@ void
d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video_encoder *pD3D12Enc,
struct pipe_video_buffer *srcTexture,
struct pipe_picture_desc *picture,
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &picParams,
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 *pHEVCPicData,
#else
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC *pHEVCPicData,
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool &bUsedAsReference)
{
struct pipe_h265_enc_picture_desc *hevcPic = (struct pipe_h265_enc_picture_desc *) picture;
@ -359,33 +363,34 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
if (pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.SupportFlags &
D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_SUPPORT_HEVC_FLAG_NUM_REF_IDX_ACTIVE_OVERRIDE_FLAG_SLICE_SUPPORT)
{
picParams.pHEVCPicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_REQUEST_NUM_REF_IDX_ACTIVE_OVERRIDE_FLAG_SLICE;
pHEVCPicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_REQUEST_NUM_REF_IDX_ACTIVE_OVERRIDE_FLAG_SLICE;
}
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
if ((hevcPic->base.profile == PIPE_VIDEO_PROFILE_HEVC_MAIN_444) ||
(hevcPic->base.profile == PIPE_VIDEO_PROFILE_HEVC_MAIN10_444) ||
(hevcPic->base.profile == PIPE_VIDEO_PROFILE_HEVC_MAIN_422) ||
(hevcPic->base.profile == PIPE_VIDEO_PROFILE_HEVC_MAIN10_422))
{
assert(picParams.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1));
assert(sizeof(*pHEVCPicData) == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2));
if (hevcPic->pic.pps_range_extension.pps_range_extension_flag)
{
//
// Clear pps_range_extension() params if pps_range_extension_flag not enabled
//
picParams.pHEVCPicData1->log2_max_transform_skip_block_size_minus2 = 0u;
picParams.pHEVCPicData1->Flags &= ~D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION;
picParams.pHEVCPicData1->Flags &= ~D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST;
picParams.pHEVCPicData1->diff_cu_chroma_qp_offset_depth = 0u;
picParams.pHEVCPicData1->chroma_qp_offset_list_len_minus1 = 0u;
for (uint32_t i = 0; i < ARRAY_SIZE(picParams.pHEVCPicData1->cb_qp_offset_list) ; i++)
pHEVCPicData->log2_max_transform_skip_block_size_minus2 = 0u;
pHEVCPicData->Flags &= ~D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION;
pHEVCPicData->Flags &= ~D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST;
pHEVCPicData->diff_cu_chroma_qp_offset_depth = 0u;
pHEVCPicData->chroma_qp_offset_list_len_minus1 = 0u;
for (uint32_t i = 0; i < ARRAY_SIZE(pHEVCPicData->cb_qp_offset_list) ; i++)
{
picParams.pHEVCPicData1->cb_qp_offset_list[i] = 0u;
picParams.pHEVCPicData1->cr_qp_offset_list[i] = 0u;
pHEVCPicData->cb_qp_offset_list[i] = 0u;
pHEVCPicData->cr_qp_offset_list[i] = 0u;
}
picParams.pHEVCPicData1->log2_sao_offset_scale_luma = 0u;
picParams.pHEVCPicData1->log2_sao_offset_scale_chroma = 0u;
pHEVCPicData->log2_sao_offset_scale_luma = 0u;
pHEVCPicData->log2_sao_offset_scale_chroma = 0u;
}
else
{
@ -399,10 +404,10 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
{
if (hevcPic->pic.transform_skip_enabled_flag)
{
picParams.pHEVCPicData1->log2_max_transform_skip_block_size_minus2 = static_cast<CHAR>(hevcPic->pic.pps_range_extension.log2_max_transform_skip_block_size_minus2);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_log2_max_transform_skip_block_size_minus2_values & (1 << picParams.pHEVCPicData1->log2_max_transform_skip_block_size_minus2)) == 0)
pHEVCPicData->log2_max_transform_skip_block_size_minus2 = static_cast<CHAR>(hevcPic->pic.pps_range_extension.log2_max_transform_skip_block_size_minus2);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_log2_max_transform_skip_block_size_minus2_values & (1 << pHEVCPicData->log2_max_transform_skip_block_size_minus2)) == 0)
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - log2_max_transform_skip_block_size_minus2 %d is not supported.\n", picParams.pHEVCPicData1->log2_max_transform_skip_block_size_minus2);
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - log2_max_transform_skip_block_size_minus2 %d is not supported.\n", pHEVCPicData->log2_max_transform_skip_block_size_minus2);
assert(false);
}
}
@ -413,24 +418,24 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
//
{
if (hevcPic->pic.pps_range_extension.cross_component_prediction_enabled_flag)
picParams.pHEVCPicData1->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION;
pHEVCPicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION;
if(((picParams.pHEVCPicData1->Flags & D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION) != 0)
if(((pHEVCPicData->Flags & D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION) != 0)
&& ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.SupportFlags & D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_SUPPORT_HEVC_FLAG_CROSS_COMPONENT_PREDICTION_ENABLED_FLAG_SUPPORT) == 0))
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION is not supported."
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION is not supported."
" Ignoring the request for this feature flag on this encode session\n");
// Disable it and keep going with a warning
picParams.pHEVCPicData1->Flags &= ~D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION;
pHEVCPicData->Flags &= ~D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION;
}
if(((picParams.pHEVCPicData1->Flags & D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION) == 0)
if(((pHEVCPicData->Flags & D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION) == 0)
&& ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.SupportFlags & D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_SUPPORT_HEVC_FLAG_CROSS_COMPONENT_PREDICTION_ENABLED_FLAG_REQUIRED) != 0))
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION is required to be set."
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION is required to be set."
" Enabling this HW required feature flag on this encode session\n");
// HW doesn't support otherwise, so set it
picParams.pHEVCPicData1->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION;
pHEVCPicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CROSS_COMPONENT_PREDICTION;
}
}
@ -439,66 +444,66 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
//
if (hevcPic->pic.pps_range_extension.chroma_qp_offset_list_enabled_flag)
{
picParams.pHEVCPicData1->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST;
if(((picParams.pHEVCPicData1->Flags & D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST) != 0)
pHEVCPicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST;
if(((pHEVCPicData->Flags & D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST) != 0)
&& ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.SupportFlags & D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_SUPPORT_HEVC_FLAG_CHROMA_QP_OFFSET_LIST_ENABLED_FLAG_SUPPORT) == 0))
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST is not supported."
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST is not supported."
" Ignoring the request for this feature flag on this encode session\n");
// Disable it and keep going with a warning
picParams.pHEVCPicData1->Flags &= ~D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST;
pHEVCPicData->Flags &= ~D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST;
}
if(((picParams.pHEVCPicData1->Flags & D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST) == 0)
if(((pHEVCPicData->Flags & D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST) == 0)
&& ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.SupportFlags & D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_SUPPORT_HEVC_FLAG_CHROMA_QP_OFFSET_LIST_ENABLED_FLAG_REQUIRED) != 0))
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST is required to be set."
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST is required to be set."
" Enabling this HW required feature flag on this encode session\n");
// HW doesn't support otherwise, so set it
picParams.pHEVCPicData1->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST;
pHEVCPicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_CHROMA_QP_OFFSET_LIST;
}
//
// Set and validate diff_cu_chroma_qp_offset_depth
//
picParams.pHEVCPicData1->diff_cu_chroma_qp_offset_depth = static_cast<UCHAR>(hevcPic->pic.pps_range_extension.diff_cu_chroma_qp_offset_depth);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_diff_cu_chroma_qp_offset_depth_values & (1 << picParams.pHEVCPicData1->diff_cu_chroma_qp_offset_depth)) == 0)
pHEVCPicData->diff_cu_chroma_qp_offset_depth = static_cast<UCHAR>(hevcPic->pic.pps_range_extension.diff_cu_chroma_qp_offset_depth);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_diff_cu_chroma_qp_offset_depth_values & (1 << pHEVCPicData->diff_cu_chroma_qp_offset_depth)) == 0)
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - diff_cu_chroma_qp_offset_depth %d is not supported.\n", picParams.pHEVCPicData1->diff_cu_chroma_qp_offset_depth);
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - diff_cu_chroma_qp_offset_depth %d is not supported.\n", pHEVCPicData->diff_cu_chroma_qp_offset_depth);
assert(false);
}
//
// Set and validate chroma_qp_offset_list_len_minus1
//
picParams.pHEVCPicData1->chroma_qp_offset_list_len_minus1 = static_cast<CHAR>(hevcPic->pic.pps_range_extension.chroma_qp_offset_list_len_minus1);
pHEVCPicData->chroma_qp_offset_list_len_minus1 = static_cast<CHAR>(hevcPic->pic.pps_range_extension.chroma_qp_offset_list_len_minus1);
if (hevcPic->pic.pps_range_extension.chroma_qp_offset_list_len_minus1 > 5)
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - chroma_qp_offset_list_len_minus1 %d is not supported.\n", hevcPic->pic.pps_range_extension.chroma_qp_offset_list_len_minus1);
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - chroma_qp_offset_list_len_minus1 %d is not supported.\n", hevcPic->pic.pps_range_extension.chroma_qp_offset_list_len_minus1);
assert(false);
}
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_chroma_qp_offset_list_len_minus1_values & (1 << picParams.pHEVCPicData1->chroma_qp_offset_list_len_minus1)) == 0)
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_chroma_qp_offset_list_len_minus1_values & (1 << pHEVCPicData->chroma_qp_offset_list_len_minus1)) == 0)
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - chroma_qp_offset_list_len_minus1 %d is not supported.\n", picParams.pHEVCPicData1->chroma_qp_offset_list_len_minus1);
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - chroma_qp_offset_list_len_minus1 %d is not supported.\n", pHEVCPicData->chroma_qp_offset_list_len_minus1);
assert(false);
}
//
// Set and validate cb_qp_offset_list, cr_qp_offset_list
//
for (uint32_t i = 0; i < picParams.pHEVCPicData1->chroma_qp_offset_list_len_minus1 ; i++)
for (uint32_t i = 0; i < pHEVCPicData->chroma_qp_offset_list_len_minus1 ; i++)
{
picParams.pHEVCPicData1->cb_qp_offset_list[i] = static_cast<CHAR>(hevcPic->pic.pps_range_extension.cb_qp_offset_list[i]);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_cb_qp_offset_list_values[i] & (1 << (picParams.pHEVCPicData1->cb_qp_offset_list[i] + 12))) == 0)
pHEVCPicData->cb_qp_offset_list[i] = static_cast<CHAR>(hevcPic->pic.pps_range_extension.cb_qp_offset_list[i]);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_cb_qp_offset_list_values[i] & (1 << (pHEVCPicData->cb_qp_offset_list[i] + 12))) == 0)
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - cb_qp_offset_list[%d] %d is not supported.\n", i, picParams.pHEVCPicData1->chroma_qp_offset_list_len_minus1);
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - cb_qp_offset_list[%d] %d is not supported.\n", i, pHEVCPicData->chroma_qp_offset_list_len_minus1);
assert(false);
}
picParams.pHEVCPicData1->cr_qp_offset_list[i] = static_cast<CHAR>(hevcPic->pic.pps_range_extension.cr_qp_offset_list[i]);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_cr_qp_offset_list_values[i] & (1 << (picParams.pHEVCPicData1->cr_qp_offset_list[i] + 12))) == 0)
pHEVCPicData->cr_qp_offset_list[i] = static_cast<CHAR>(hevcPic->pic.pps_range_extension.cr_qp_offset_list[i]);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_cr_qp_offset_list_values[i] & (1 << (pHEVCPicData->cr_qp_offset_list[i] + 12))) == 0)
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - cr_qp_offset_list[%d] %d is not supported.\n", i, picParams.pHEVCPicData1->chroma_qp_offset_list_len_minus1);
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - cr_qp_offset_list[%d] %d is not supported.\n", i, pHEVCPicData->chroma_qp_offset_list_len_minus1);
assert(false);
}
}
@ -507,42 +512,62 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
//
// Set and validate log2_sao_offset_scale_luma
//
picParams.pHEVCPicData1->log2_sao_offset_scale_luma = static_cast<UCHAR>(hevcPic->pic.pps_range_extension.log2_sao_offset_scale_luma);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_log2_sao_offset_scale_luma_values & (1 << picParams.pHEVCPicData1->log2_sao_offset_scale_luma)) == 0)
pHEVCPicData->log2_sao_offset_scale_luma = static_cast<UCHAR>(hevcPic->pic.pps_range_extension.log2_sao_offset_scale_luma);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_log2_sao_offset_scale_luma_values & (1 << pHEVCPicData->log2_sao_offset_scale_luma)) == 0)
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - log2_sao_offset_scale_luma %d is not supported.\n", picParams.pHEVCPicData1->log2_sao_offset_scale_luma);
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - log2_sao_offset_scale_luma %d is not supported.\n", pHEVCPicData->log2_sao_offset_scale_luma);
assert(false);
}
//
// Set and validate log2_sao_offset_scale_chroma
//
picParams.pHEVCPicData1->log2_sao_offset_scale_chroma = static_cast<UCHAR>(hevcPic->pic.pps_range_extension.log2_sao_offset_scale_chroma);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_log2_sao_offset_scale_chroma_values & (1 << picParams.pHEVCPicData1->log2_sao_offset_scale_chroma)) == 0)
pHEVCPicData->log2_sao_offset_scale_chroma = static_cast<UCHAR>(hevcPic->pic.pps_range_extension.log2_sao_offset_scale_chroma);
if ((pD3D12Enc->m_currentEncodeCapabilities.m_encoderCodecSpecificConfigCaps.m_HEVCCodecCaps.allowed_log2_sao_offset_scale_chroma_values & (1 << pHEVCPicData->log2_sao_offset_scale_chroma)) == 0)
{
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 arguments are not supported - log2_sao_offset_scale_chroma %d is not supported.\n", picParams.pHEVCPicData1->log2_sao_offset_scale_chroma);
debug_printf("D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 arguments are not supported - log2_sao_offset_scale_chroma %d is not supported.\n", pHEVCPicData->log2_sao_offset_scale_chroma);
assert(false);
}
}
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
picParams.pHEVCPicData->slice_pic_parameter_set_id = pHEVCBitstreamBuilder->get_active_pps().pps_pic_parameter_set_id;
pHEVCPicData->slice_pic_parameter_set_id = pHEVCBitstreamBuilder->get_active_pps().pps_pic_parameter_set_id;
//
// These need to be set here so they're available for SPS/PPS header building (reference manager updates after that, for slice header params)
//
picParams.pHEVCPicData->TemporalLayerIndex = hevcPic->pic.temporal_id;
picParams.pHEVCPicData->List0ReferenceFramesCount = 0;
picParams.pHEVCPicData->List1ReferenceFramesCount = 0;
pHEVCPicData->TemporalLayerIndex = hevcPic->pic.temporal_id;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
pHEVCPicData->num_ref_idx_l0_active_minus1 = 0;
pHEVCPicData->num_ref_idx_l1_active_minus1 = 0;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
pHEVCPicData->List0ReferenceFramesCount = 0;
pHEVCPicData->List1ReferenceFramesCount = 0;
if ((hevcPic->picture_type == PIPE_H2645_ENC_PICTURE_TYPE_P) ||
(hevcPic->picture_type == PIPE_H2645_ENC_PICTURE_TYPE_B))
picParams.pHEVCPicData->List0ReferenceFramesCount = hevcPic->num_ref_idx_l0_active_minus1 + 1;
{
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
pHEVCPicData->num_ref_idx_l0_active_minus1 = hevcPic->num_ref_idx_l0_active_minus1;
pHEVCPicData->List0ReferenceFramesCount = hevcPic->num_ref_idx_l0_active_minus1 + 1;
#else // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
pHEVCPicData->List0ReferenceFramesCount = hevcPic->num_ref_idx_l0_active_minus1 + 1;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
}
if (hevcPic->picture_type == PIPE_H2645_ENC_PICTURE_TYPE_B)
picParams.pHEVCPicData->List1ReferenceFramesCount = hevcPic->num_ref_idx_l1_active_minus1 + 1;
{
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
pHEVCPicData->num_ref_idx_l1_active_minus1 = hevcPic->num_ref_idx_l1_active_minus1;
pHEVCPicData->List1ReferenceFramesCount = hevcPic->num_ref_idx_l1_active_minus1 + 1;
#else // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
pHEVCPicData->List1ReferenceFramesCount = hevcPic->num_ref_idx_l1_active_minus1 + 1;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
}
if ((pD3D12Enc->m_currentEncodeConfig.m_encoderCodecSpecificConfigDesc.m_HEVCConfig.ConfigurationFlags
& D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_HEVC_FLAG_ALLOW_REQUEST_INTRA_CONSTRAINED_SLICES) != 0)
picParams.pHEVCPicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_REQUEST_INTRA_CONSTRAINED_SLICES;
pHEVCPicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_REQUEST_INTRA_CONSTRAINED_SLICES;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
if (pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.AppRequested)
{
@ -555,13 +580,27 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
hevc_min_delta_qp,
hevc_max_delta_qp,
pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap8Bit);
picParams.pHEVCPicData->pRateControlQPMap = pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap8Bit.data();
picParams.pHEVCPicData->QPMapValuesCount = static_cast<UINT>(pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap8Bit.size());
pHEVCPicData->pRateControlQPMap = pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap8Bit.data();
pHEVCPicData->QPMapValuesCount = static_cast<UINT>(pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.m_pRateControlQPMap8Bit.size());
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
// TODO: Here call begin_frame1 and get_current_frame_picture_control_data1 when applicable
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 picParams1 = {};
picParams1.pHEVCPicData = pHEVCPicData;
picParams1.DataSize = sizeof(*pHEVCPicData);
pD3D12Enc->m_upDPBManager->begin_frame1(picParams1, bUsedAsReference, picture);
pD3D12Enc->m_upDPBManager->get_current_frame_picture_control_data1(picParams1);
#else
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA picParams = {};
picParams.pHEVCPicData = pHEVCPicData;
picParams.DataSize = sizeof(*pHEVCPicData);
pD3D12Enc->m_upDPBManager->begin_frame(picParams, bUsedAsReference, picture);
pD3D12Enc->m_upDPBManager->get_current_frame_picture_control_data(picParams);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
// Save state snapshot from record time to resolve headers at get_feedback time
size_t current_metadata_slot = static_cast<size_t>(pD3D12Enc->m_fenceValue % D3D12_VIDEO_ENC_METADATA_BUFFERS_COUNT);
@ -777,27 +816,6 @@ ConvertHEVCSupportFromProfile(D3D12_VIDEO_ENCODER_PROFILE_HEVC profile, D3D12_VI
return capCodecConfigData;
}
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA
ConvertHEVCPicParamsFromProfile(D3D12_VIDEO_ENCODER_PROFILE_HEVC profile, D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1* pPictureParams1)
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curPicParamsData = {};
if (profile <= D3D12_VIDEO_ENCODER_PROFILE_HEVC_MAIN10)
{
// Profiles defined up to D3D12_VIDEO_ENCODER_PROFILE_HEVC_MAIN10 use D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC
curPicParamsData.pHEVCPicData = reinterpret_cast<D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC*>(pPictureParams1);
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 binary-compatible with D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC
curPicParamsData.DataSize = sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC);
}
else
{
// Profiles defined between D3D12_VIDEO_ENCODER_PROFILE_HEVC_MAIN12 and D3D12_VIDEO_ENCODER_PROFILE_HEVC_MAIN16_444 use D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1
assert (profile <= D3D12_VIDEO_ENCODER_PROFILE_HEVC_MAIN16_444);
curPicParamsData.pHEVCPicData1 = pPictureParams1;
curPicParamsData.DataSize = sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1);
}
return curPicParamsData;
}
D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_HEVC
d3d12_video_encoder_convert_hevc_codec_configuration(struct d3d12_video_encoder *pD3D12Enc,
pipe_h265_enc_picture_desc *picture,
@ -1574,7 +1592,7 @@ d3d12_video_encoder_build_codec_headers_hevc(struct d3d12_video_encoder *pD3D12E
pHEVCBitstreamBuilder->get_active_sps(),
static_cast<uint8_t>(currentPicParams.pHEVCPicData->slice_pic_parameter_set_id),
*codecConfigDesc.pHEVCConfig,
*currentPicParams.pHEVCPicData1,
*currentPicParams.pHEVCPicData,
pD3D12Enc->m_StagingHeadersBuffer,
pD3D12Enc->m_StagingHeadersBuffer.begin(),
writtenPPSBytesCount);

View file

@ -52,7 +52,11 @@ void
d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video_encoder *pD3D12Enc,
struct pipe_video_buffer * srcTexture,
struct pipe_picture_desc * picture,
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &picParams,
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 *pHEVCPicData,
#else
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC *pHEVCPicData,
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool &bUsedAsReference);
uint32_t
d3d12_video_encoder_build_codec_headers_hevc(struct d3d12_video_encoder *pD3D12Enc,

View file

@ -587,7 +587,7 @@ d3d12_video_bitstream_builder_hevc::build_pps(const struct pipe_h265_enc_pic_par
const HevcSeqParameterSet& parentSPS,
uint8_t pic_parameter_set_id,
const D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_HEVC& codecConfig,
const D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1& pictureControl,
const D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC& pictureControl,
std::vector<BYTE> &headerBitstream,
std::vector<BYTE>::iterator placingPositionStart,
size_t &writtenBytes)

View file

@ -61,7 +61,7 @@ class d3d12_video_bitstream_builder_hevc : public d3d12_video_bitstream_builder_
const HevcSeqParameterSet& parentSPS,
uint8_t pic_parameter_set_id,
const D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_HEVC& codecConfig,
const D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1& pictureControl,
const D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC& pictureControl,
std::vector<BYTE> &headerBitstream,
std::vector<BYTE>::iterator placingPositionStart,
size_t &writtenBytes);

View file

@ -31,10 +31,15 @@ class d3d12_video_encoder_references_manager_interface
{
public:
virtual void begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA, bool bUsedAsReference, struct pipe_picture_desc* picture) = 0;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
virtual void begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1, bool bUsedAsReference, struct pipe_picture_desc* picture) = 0;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
virtual void end_frame() = 0;
virtual D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE get_current_frame_recon_pic_output_allocation() = 0;
virtual bool
get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation) = 0;
virtual bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation) = 0;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
virtual bool get_current_frame_picture_control_data1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation) = 0;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
virtual bool is_current_frame_used_as_reference() = 0;
virtual D3D12_VIDEO_ENCODE_REFERENCE_FRAMES get_current_reference_frames() = 0;
virtual ~d3d12_video_encoder_references_manager_interface()

View file

@ -76,12 +76,30 @@ d3d12_video_encoder_references_manager_av1::is_current_frame_used_as_reference()
return m_isCurrentFrameUsedAsReference;
}
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void
d3d12_video_encoder_references_manager_av1::begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
d3d12_video_encoder_references_manager_av1::begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_CurrentFramePicParams = *curFrameData.pAV1PicData;
begin_frame_impl(bUsedAsReference, picture);
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void
d3d12_video_encoder_references_manager_av1::begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_CurrentFramePicParams = *curFrameData.pAV1PicData;
begin_frame_impl(bUsedAsReference, picture);
}
void
d3d12_video_encoder_references_manager_av1::begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_isCurrentFrameUsedAsReference = bUsedAsReference;
if (m_CurrentFramePicParams.FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_KEY_FRAME)
@ -294,6 +312,21 @@ d3d12_video_encoder_references_manager_av1::get_dpb_physical_slot_refcount_from_
return refCount;
}
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool
d3d12_video_encoder_references_manager_av1::get_current_frame_picture_control_data1(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation)
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA picData = {};
picData.DataSize = codecAllocation.DataSize;
picData.pAV1PicData = codecAllocation.pAV1PicData;
bool res = get_current_frame_picture_control_data(picData);
codecAllocation.DataSize = picData.DataSize;
codecAllocation.pAV1PicData = picData.pAV1PicData;
return res;
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool
d3d12_video_encoder_references_manager_av1::get_current_frame_picture_control_data(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation)

View file

@ -37,6 +37,12 @@ class d3d12_video_encoder_references_manager_av1 : public d3d12_video_encoder_re
struct pipe_picture_desc *picture);
D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE get_current_frame_recon_pic_output_allocation();
bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation);
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture);
bool get_current_frame_picture_control_data1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool is_current_frame_used_as_reference();
D3D12_VIDEO_ENCODE_REFERENCE_FRAMES get_current_reference_frames();
@ -58,6 +64,8 @@ class d3d12_video_encoder_references_manager_av1 : public d3d12_video_encoder_re
void print_virtual_dpb_entries();
void print_physical_resource_references();
void print_ref_frame_idx();
void begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture);
// Class members

View file

@ -30,6 +30,21 @@
using namespace std;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool
d3d12_video_encoder_references_manager_h264::get_current_frame_picture_control_data1(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation)
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA picData = {};
picData.DataSize = codecAllocation.DataSize;
picData.pH264PicData = codecAllocation.pH264PicData;
bool res = get_current_frame_picture_control_data(picData);
codecAllocation.DataSize = picData.DataSize;
codecAllocation.pH264PicData = picData.pH264PicData;
return res;
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool
d3d12_video_encoder_references_manager_h264::get_current_frame_picture_control_data(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation)
@ -274,12 +289,30 @@ d3d12_video_encoder_convert_frame_type_h264(enum pipe_h2645_enc_picture_type pic
}
}
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void
d3d12_video_encoder_references_manager_h264::begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_curFrameState = *curFrameData.pH264PicData;
begin_frame_impl(bUsedAsReference, picture);
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void
d3d12_video_encoder_references_manager_h264::begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_curFrameState = *curFrameData.pH264PicData;
begin_frame_impl(bUsedAsReference, picture);
}
void
d3d12_video_encoder_references_manager_h264::begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_isCurrentFrameUsedAsReference = bUsedAsReference;
struct pipe_h264_enc_picture_desc *h264Pic = (struct pipe_h264_enc_picture_desc *) picture;

View file

@ -34,6 +34,13 @@ class d3d12_video_encoder_references_manager_h264 : public d3d12_video_encoder_r
bool bUsedAsReference,
struct pipe_picture_desc *picture);
bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation);
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture);
bool get_current_frame_picture_control_data1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODE_REFERENCE_FRAMES get_current_reference_frames();
bool is_current_frame_used_as_reference()
@ -57,6 +64,8 @@ class d3d12_video_encoder_references_manager_h264 : public d3d12_video_encoder_r
void print_dpb();
void print_l0_l1_lists();
void print_mmco_lists();
void begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture);
// Class members
struct d3d12_video_dpb

View file

@ -30,14 +30,23 @@
using namespace std;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool
d3d12_video_encoder_references_manager_hevc::get_current_frame_picture_control_data1(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation)
{
assert(codecAllocation.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2));
memcpy(codecAllocation.pHEVCPicData, &m_curFrameState, sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2));
return true;
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool
d3d12_video_encoder_references_manager_hevc::get_current_frame_picture_control_data(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation)
{
assert((codecAllocation.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC)) ||
(codecAllocation.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1)));
memcpy(codecAllocation.pHEVCPicData1, &m_curFrameState, codecAllocation.DataSize);
memset((uint8_t *)(codecAllocation.pHEVCPicData1) + codecAllocation.DataSize, 0, sizeof(m_curFrameState) - codecAllocation.DataSize);
assert(codecAllocation.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC));
memcpy(codecAllocation.pHEVCPicData, &m_curFrameState, sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC));
return true;
}
@ -243,16 +252,33 @@ d3d12_video_encoder_convert_frame_type_hevc(enum pipe_h2645_enc_picture_type pic
}
}
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void
d3d12_video_encoder_references_manager_hevc::begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
assert(curFrameData.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2));
memcpy(&m_curFrameState, curFrameData.pHEVCPicData, sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2));
begin_frame_impl(bUsedAsReference, picture);
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void
d3d12_video_encoder_references_manager_hevc::begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
assert((curFrameData.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC)) ||
(curFrameData.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1)));
memcpy(&m_curFrameState, curFrameData.pHEVCPicData1, curFrameData.DataSize);
assert(curFrameData.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC));
memcpy(&m_curFrameState, curFrameData.pHEVCPicData, sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC));
memset(((uint8_t*)(&m_curFrameState) + curFrameData.DataSize), 0, sizeof(m_curFrameState) - curFrameData.DataSize);
begin_frame_impl(bUsedAsReference, picture);
}
void
d3d12_video_encoder_references_manager_hevc::begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_isCurrentFrameUsedAsReference = bUsedAsReference;
struct pipe_h265_enc_picture_desc *hevcPic = (struct pipe_h265_enc_picture_desc *) picture;

View file

@ -34,6 +34,12 @@ class d3d12_video_encoder_references_manager_hevc : public d3d12_video_encoder_r
bool bUsedAsReference,
struct pipe_picture_desc *picture);
bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation);
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture);
bool get_current_frame_picture_control_data1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODE_REFERENCE_FRAMES get_current_reference_frames();
bool is_current_frame_used_as_reference()
@ -58,6 +64,8 @@ class d3d12_video_encoder_references_manager_hevc : public d3d12_video_encoder_r
void update_fifo_dpb_push_front_cur_recon_pic();
void print_dpb();
void print_l0_l1_lists();
void begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture);
// Class members
struct d3d12_video_dpb
@ -80,7 +88,11 @@ class d3d12_video_encoder_references_manager_hevc : public d3d12_video_encoder_r
current_frame_references_data m_CurrentFrameReferencesData;
bool m_isCurrentFrameUsedAsReference = false;
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1 m_curFrameState = {};
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 m_curFrameState = {};
#else
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC m_curFrameState = {};
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool m_fArrayOfTextures = false;
};

View file

@ -1091,6 +1091,12 @@ get_gpu_output_stats_support(D3D12_VIDEO_ENCODER_INPUT_MAP_SESSION_INFO sessionI
{0u, 0u},
// D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_DESC EncoderOutputMetadataBitAllocationMapTextureDimensions; // output
{0u, 0u},
// UINT EncoderOutputMetadataFramePSNRComponentsNumber; // output
0u,
// UINT EncoderOutputMetadataSubregionsPSNRComponentsNumber; // output
0u,
// UINT EncoderOutputMetadataSubregionsPSNRResolvedMetadataBufferSize; // output
0u,
};
if (SUCCEEDED(pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_RESOURCE_REQUIREMENTS1, &capStatsResourceReqs, sizeof(capStatsResourceReqs))))
@ -1395,6 +1401,21 @@ d3d12_has_video_encode_support(struct pipe_screen *pscreen,
case PIPE_VIDEO_PROFILE_HEVC_MAIN_422:
case PIPE_VIDEO_PROFILE_HEVC_MAIN10_422:
{
bool bRuntimeSupportsProfile = true;
if ((profile != PIPE_VIDEO_PROFILE_HEVC_MAIN) &&
(profile != PIPE_VIDEO_PROFILE_HEVC_MAIN_10))
{
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
// Video encode support in underlying d3d12 device needs ID3D12VideoDevice4
// for this HEVC 422/444 d3d12 gallium driver implementation
ComPtr<ID3D12VideoDevice4> spD3D12VideoDevice4;
bRuntimeSupportsProfile = SUCCEEDED(spD3D12VideoDevice->QueryInterface(IID_PPV_ARGS(spD3D12VideoDevice4.GetAddressOf())));
#else
bRuntimeSupportsProfile = false;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
}
D3D12_VIDEO_ENCODER_PROFILE_DESC profDesc = {};
D3D12_VIDEO_ENCODER_PROFILE_HEVC profHEVC =
d3d12_video_encoder_convert_profile_to_d3d12_enc_profile_hevc(profile);
@ -1410,11 +1431,11 @@ d3d12_has_video_encode_support(struct pipe_screen *pscreen,
maxLvl.pHEVCLevelSetting = &maxLvlSettingHEVC;
maxLvl.DataSize = sizeof(maxLvlSettingHEVC);
if (d3d12_video_encode_max_supported_level_for_profile(codecDesc,
profDesc,
minLvl,
maxLvl,
spD3D12VideoDevice.Get())) {
if (bRuntimeSupportsProfile && d3d12_video_encode_max_supported_level_for_profile(codecDesc,
profDesc,
minLvl,
maxLvl,
spD3D12VideoDevice.Get())) {
d3d12_video_encoder_convert_from_d3d12_level_hevc(maxLvlSettingHEVC.Level, maxLvlSpec);
D3D12_VIDEO_ENCODER_PROFILE_DESC d3d12_profile;

View file

@ -42,7 +42,7 @@ using Microsoft::WRL::ComPtr;
#define D3D12_VIDEO_ANY_DECODER_ENABLED (VIDEO_CODEC_H264DEC || VIDEO_CODEC_H265DEC || VIDEO_CODEC_AV1DEC || VIDEO_CODEC_VP9DEC)
#define D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE (USE_D3D12_PREVIEW_HEADERS && (D3D12_PREVIEW_SDK_VERSION >= 716))
#define D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE (USE_D3D12_PREVIEW_HEADERS && (D3D12_PREVIEW_SDK_VERSION >= 717))
#if !defined(_WIN32) || defined(_MSC_VER)
inline D3D12_VIDEO_DECODER_HEAP_DESC
@ -162,8 +162,6 @@ uint8_t
d3d12_video_encoder_convert_12tusize_to_pixel_size_hevc(const D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_HEVC_TUSIZE& TUSize);
D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_SUPPORT
ConvertHEVCSupportFromProfile(D3D12_VIDEO_ENCODER_PROFILE_HEVC profile, D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_SUPPORT_HEVC1* pSupport1);
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA
ConvertHEVCPicParamsFromProfile(D3D12_VIDEO_ENCODER_PROFILE_HEVC profile, D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC1* pPictureParams1);
bool
d3d12_video_encode_requires_texture_array_dpb(struct d3d12_screen* pScreen, enum pipe_video_profile profile);