d3d12: Video Encode HEVC to use direct DPB from frontend

Reviewed-by: Jesse Natalie <jenatali@microsoft.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/30908>
This commit is contained in:
Sil Vilerino 2024-08-29 07:20:33 -04:00 committed by Marge Bot
parent 0249f2e652
commit e268ed0613
5 changed files with 284 additions and 432 deletions

View file

@ -352,7 +352,7 @@ d3d12_video_encoder_uses_direct_dpb(enum pipe_video_format codec)
#if VIDEO_CODEC_H265ENC
case PIPE_VIDEO_FORMAT_HEVC:
{
return false;
return true;
} break;
#endif
#if VIDEO_CODEC_AV1ENC
@ -606,19 +606,7 @@ d3d12_video_encoder_create_reference_picture_manager(struct d3d12_video_encoder
#if VIDEO_CODEC_H265ENC
case PIPE_VIDEO_FORMAT_HEVC:
{
bool gopHasPFrames =
(pD3D12Enc->m_currentEncodeConfig.m_encoderGOPConfigDesc.m_HEVCGroupOfPictures.PPicturePeriod > 0) &&
((pD3D12Enc->m_currentEncodeConfig.m_encoderGOPConfigDesc.m_HEVCGroupOfPictures.GOPLength == 0) ||
(pD3D12Enc->m_currentEncodeConfig.m_encoderGOPConfigDesc.m_HEVCGroupOfPictures.PPicturePeriod <
pD3D12Enc->m_currentEncodeConfig.m_encoderGOPConfigDesc.m_HEVCGroupOfPictures.GOPLength));
pD3D12Enc->m_upDPBManager = std::make_unique<d3d12_video_encoder_references_manager_hevc>(
gopHasPFrames,
*pD3D12Enc->m_upDPBStorageManager,
// Max number of frames to be used as a reference, without counting the current recon picture
d3d12_video_encoder_get_current_max_dpb_capacity(pD3D12Enc)
);
pD3D12Enc->m_upDPBManager = std::make_unique<d3d12_video_encoder_references_manager_hevc>();
pD3D12Enc->m_upBitstreamBuilder = std::make_unique<d3d12_video_bitstream_builder_hevc>();
} break;
#endif

View file

@ -358,23 +358,6 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
}
picParams.pHEVCPicData->slice_pic_parameter_set_id = pHEVCBitstreamBuilder->get_active_pps().pps_pic_parameter_set_id;
picParams.pHEVCPicData->FrameType = d3d12_video_encoder_convert_frame_type_hevc(hevcPic->picture_type);
picParams.pHEVCPicData->PictureOrderCountNumber = hevcPic->pic_order_cnt;
picParams.pHEVCPicData->List0ReferenceFramesCount = 0;
picParams.pHEVCPicData->pList0ReferenceFrames = nullptr;
picParams.pHEVCPicData->List1ReferenceFramesCount = 0;
picParams.pHEVCPicData->pList1ReferenceFrames = nullptr;
if (picParams.pHEVCPicData->FrameType == D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_P_FRAME) {
picParams.pHEVCPicData->List0ReferenceFramesCount = hevcPic->num_ref_idx_l0_active_minus1 + 1;
picParams.pHEVCPicData->pList0ReferenceFrames = hevcPic->ref_idx_l0_list;
} else if (picParams.pHEVCPicData->FrameType == D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_B_FRAME) {
picParams.pHEVCPicData->List0ReferenceFramesCount = hevcPic->num_ref_idx_l0_active_minus1 + 1;
picParams.pHEVCPicData->pList0ReferenceFrames = hevcPic->ref_idx_l0_list;
picParams.pHEVCPicData->List1ReferenceFramesCount = hevcPic->num_ref_idx_l1_active_minus1 + 1;
picParams.pHEVCPicData->pList1ReferenceFrames = hevcPic->ref_idx_l1_list;
}
if ((pD3D12Enc->m_currentEncodeConfig.m_encoderCodecSpecificConfigDesc.m_HEVCConfig.ConfigurationFlags
& D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_HEVC_FLAG_ALLOW_REQUEST_INTRA_CONSTRAINED_SLICES) != 0)
@ -396,33 +379,6 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
}
}
D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC
d3d12_video_encoder_convert_frame_type_hevc(enum pipe_h2645_enc_picture_type picType)
{
   // Map the gallium/pipe HEVC picture type onto the equivalent D3D12
   // encoder frame type. Any other pipe picture type is a caller bug.
   switch (picType) {
      case PIPE_H2645_ENC_PICTURE_TYPE_IDR:
         return D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_IDR_FRAME;
      case PIPE_H2645_ENC_PICTURE_TYPE_I:
         return D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_I_FRAME;
      case PIPE_H2645_ENC_PICTURE_TYPE_P:
         return D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_P_FRAME;
      case PIPE_H2645_ENC_PICTURE_TYPE_B:
         return D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_B_FRAME;
      default:
         unreachable("Unsupported pipe_h2645_enc_picture_type");
   }
}
///
/// Tries to configure the encoder using the requested slice configuration
/// or falls back to single slice encoding.

View file

@ -54,8 +54,6 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
struct pipe_picture_desc * picture,
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &picParams,
bool &bUsedAsReference);
D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC
d3d12_video_encoder_convert_frame_type_hevc(enum pipe_h2645_enc_picture_type picType);
uint32_t
d3d12_video_encoder_build_codec_headers_hevc(struct d3d12_video_encoder *pD3D12Enc,
std::vector<uint64_t> &pWrittenCodecUnitsSizes);

View file

@ -25,186 +25,25 @@
#include <algorithm>
#include <string>
#include "d3d12_screen.h"
#include "d3d12_resource.h"
#include "d3d12_video_buffer.h"
using namespace std;
d3d12_video_encoder_references_manager_hevc::d3d12_video_encoder_references_manager_hevc(
bool gopHasIorPFrames, d3d12_video_dpb_storage_manager_interface &rDpbStorageManager, uint32_t MaxDPBCapacity)
: m_MaxDPBCapacity(MaxDPBCapacity),
m_rDPBStorageManager(rDpbStorageManager),
m_CurrentFrameReferencesData({}),
m_gopHasInterFrames(gopHasIorPFrames)
{
assert((m_MaxDPBCapacity + 1 /*extra for cur frame output recon pic*/) ==
m_rDPBStorageManager.get_number_of_tracked_allocations());
debug_printf("[D3D12 Video Encoder Picture Manager HEVC] Completed construction of "
"d3d12_video_encoder_references_manager_hevc instance, settings are\n");
debug_printf("[D3D12 Video Encoder Picture Manager HEVC] m_MaxDPBCapacity: %d\n", m_MaxDPBCapacity);
}
void
d3d12_video_encoder_references_manager_hevc::reset_gop_tracking_and_dpb()
{
// Reset m_CurrentFrameReferencesData tracking
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.clear();
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.reserve(m_MaxDPBCapacity);
m_curFrameStateDescriptorStorage.reserve(m_MaxDPBCapacity);
m_CurrentFrameReferencesData.ReconstructedPicTexture = { nullptr, 0 };
// Reset DPB storage
ASSERTED uint32_t numPicsBeforeClearInDPB = m_rDPBStorageManager.get_number_of_pics_in_dpb();
ASSERTED uint32_t cFreedResources = m_rDPBStorageManager.clear_decode_picture_buffer();
assert(numPicsBeforeClearInDPB == cFreedResources);
// Initialize if needed the reconstructed picture allocation for the first IDR picture in the GOP
// This needs to be done after initializing the GOP tracking state above since it makes decisions based on the
// current picture type.
prepare_current_frame_recon_pic_allocation();
// After clearing the DPB, outstanding used allocations should be 1u only for the first allocation for the
// reconstructed picture of the initial IDR in the GOP
assert(m_rDPBStorageManager.get_number_of_in_use_allocations() == (m_gopHasInterFrames ? 1u : 0u));
assert(m_rDPBStorageManager.get_number_of_tracked_allocations() <=
(m_MaxDPBCapacity + 1)); // pool is not extended beyond maximum expected usage
}
// Calculates the picture control structure for the current frame
bool
d3d12_video_encoder_references_manager_hevc::get_current_frame_picture_control_data(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation)
{
// Update reference picture control structures (L0/L1 and DPB descriptors lists based on current frame and next frame
// in GOP) for next frame
debug_printf("[D3D12 Video Encoder Picture Manager HEVC] %d resources IN USE out of a total of %d ALLOCATED "
"resources at frame with POC: %d\n",
m_rDPBStorageManager.get_number_of_in_use_allocations(),
m_rDPBStorageManager.get_number_of_tracked_allocations(),
m_curFrameState.PictureOrderCountNumber);
// See casts below
assert(m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.size() < UINT32_MAX);
bool needsL0List = (m_curFrameState.FrameType == D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_P_FRAME) ||
(m_curFrameState.FrameType == D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_B_FRAME);
bool needsL1List = (m_curFrameState.FrameType == D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_B_FRAME);
// (in HEVC I pics might contain following pics, ref not used in curr)
bool needsRefPicDescriptors = (m_curFrameState.FrameType != D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_IDR_FRAME);
assert(codecAllocation.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC));
// See D3D12 Encode spec below
// pList0ReferenceFrames
// List of past frame reference frames to be used for this frame. Each integer value in this array indices into
// pReferenceFramesReconPictureDescriptors to reference pictures kept in the DPB.
// pList1ReferenceFrames
// List of future frame reference frames to be used for this frame. Each integer value in this array indices into
// pReferenceFramesReconPictureDescriptors to reference pictures kept in the DPB.
// Need to map from frame_num in the receiving ref_idx_l0_list/ref_idx_l1_list to the position with that
// reference_lists_frame_idx in the DPB descriptor
// L0 and L1 need to be ordered by POC (ie. different RefPicSets). The upper layers building the lists should but might not be following this
// Set all in the DPB as unused for the current frame, then below just mark as used the ones references by L0 and L1
for(UINT idx = 0 ; idx < m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.size() ; idx++)
{
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[idx].base.IsRefUsedByCurrentPic = false;
}
if (needsL0List && (m_curFrameState.List0ReferenceFramesCount > 0)) {
std::vector<uint32_t> tmpL0(m_curFrameState.List0ReferenceFramesCount, 0);
memcpy(tmpL0.data(),
m_curFrameState.pList0ReferenceFrames,
m_curFrameState.List0ReferenceFramesCount * sizeof(m_curFrameState.pList0ReferenceFrames[0]));
for (size_t l0Idx = 0; l0Idx < m_curFrameState.List0ReferenceFramesCount; l0Idx++) {
// tmpL0[l0Idx] has frame_num's (reference_lists_frame_idx)
// m_curFrameState.pList0ReferenceFrames[l0Idx] needs to have the index j of
// pReferenceFramesReconPictureDescriptors where
// pReferenceFramesReconPictureDescriptors[j].reference_lists_frame_idx == tmpL0[l0Idx]
auto value = tmpL0[l0Idx];
auto foundItemIt = std::find_if(m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.begin(),
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.end(),
[&value](const D3D12_VIDEO_ENCODER_REFERENCE_PICTURE_DESCRIPTOR_HEVC_EX &p) {
return p.reference_lists_frame_idx == value;
});
assert(foundItemIt != m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.end());
if (foundItemIt == m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.end())
{
return true;
}
m_curFrameState.pList0ReferenceFrames[l0Idx] =
std::distance(m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.begin(), foundItemIt);
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[m_curFrameState.pList0ReferenceFrames[l0Idx]].base.IsRefUsedByCurrentPic = true;
}
}
if (needsL1List && (m_curFrameState.List1ReferenceFramesCount > 0)) {
std::vector<uint32_t> tmpL1(m_curFrameState.List1ReferenceFramesCount, 0);
memcpy(tmpL1.data(),
m_curFrameState.pList1ReferenceFrames,
m_curFrameState.List1ReferenceFramesCount * sizeof(m_curFrameState.pList1ReferenceFrames[0]));
for (size_t l1Idx = 0; l1Idx < m_curFrameState.List1ReferenceFramesCount; l1Idx++) {
// tmpL1[l1Idx] has frame_num's (reference_lists_frame_idx)
// m_curFrameState.pList1ReferenceFrames[l1Idx] needs to have the index j of
// pReferenceFramesReconPictureDescriptors where
// pReferenceFramesReconPictureDescriptors[j].reference_lists_frame_idx == tmpL1[l1Idx]
auto value = tmpL1[l1Idx];
auto foundItemIt = std::find_if(m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.begin(),
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.end(),
[&value](const D3D12_VIDEO_ENCODER_REFERENCE_PICTURE_DESCRIPTOR_HEVC_EX &p) {
return p.reference_lists_frame_idx == value;
});
assert(foundItemIt != m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.end());
if (foundItemIt == m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.end())
{
return true;
}
m_curFrameState.pList1ReferenceFrames[l1Idx] =
std::distance(m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.begin(), foundItemIt);
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[m_curFrameState.pList1ReferenceFrames[l1Idx]].base.IsRefUsedByCurrentPic = true;
}
}
m_curFrameState.List0ReferenceFramesCount = needsL0List ? m_curFrameState.List0ReferenceFramesCount : 0;
m_curFrameState.pList0ReferenceFrames = needsL0List ? m_curFrameState.pList0ReferenceFrames : nullptr,
m_curFrameState.List1ReferenceFramesCount = needsL1List ? m_curFrameState.List1ReferenceFramesCount : 0,
m_curFrameState.pList1ReferenceFrames = needsL1List ? m_curFrameState.pList1ReferenceFrames : nullptr;
if (!needsRefPicDescriptors) {
m_curFrameState.ReferenceFramesReconPictureDescriptorsCount = 0;
m_curFrameState.pReferenceFramesReconPictureDescriptors = nullptr;
} else {
m_curFrameState.ReferenceFramesReconPictureDescriptorsCount = static_cast<uint32_t>(m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.size());
m_curFrameStateDescriptorStorage.resize(m_curFrameState.ReferenceFramesReconPictureDescriptorsCount);
for(uint32_t idx = 0; idx < m_curFrameState.ReferenceFramesReconPictureDescriptorsCount ; idx++) {
m_curFrameStateDescriptorStorage[idx] = m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[idx].base;
}
m_curFrameState.pReferenceFramesReconPictureDescriptors = m_curFrameStateDescriptorStorage.data();
}
if (codecAllocation.DataSize != sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC))
return false;
*codecAllocation.pHEVCPicData = m_curFrameState;
print_l0_l1_lists();
print_dpb();
return true;
}
// Returns the resource allocation for a reconstructed picture output for the current frame
D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE
d3d12_video_encoder_references_manager_hevc::get_current_frame_recon_pic_output_allocation()
{
return m_CurrentFrameReferencesData.ReconstructedPicTexture;
}
D3D12_VIDEO_ENCODE_REFERENCE_FRAMES
d3d12_video_encoder_references_manager_hevc::get_current_reference_frames()
{
@ -217,109 +56,45 @@ d3d12_video_encoder_references_manager_hevc::get_current_reference_frames()
// Return nullptr for fully intra frames (eg IDR)
// and return references information for inter frames (eg.P/B) and I frame that doesn't flush DPB
if ((m_curFrameState.FrameType != D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_IDR_FRAME) && m_gopHasInterFrames) {
auto curRef = m_rDPBStorageManager.get_current_reference_frames();
retVal.NumTexture2Ds = curRef.NumTexture2Ds;
retVal.ppTexture2Ds = curRef.ppTexture2Ds;
retVal.pSubresources = curRef.pSubresources;
if (m_curFrameState.FrameType != D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_IDR_FRAME) {
retVal.NumTexture2Ds = m_CurrentFrameReferencesData.ReferenceTextures.pResources.size();
retVal.ppTexture2Ds = m_CurrentFrameReferencesData.ReferenceTextures.pResources.data();
// D3D12 Encode expects null subresources for AoT
bool isAoT = (std::all_of(m_CurrentFrameReferencesData.ReferenceTextures.pSubresources.begin(),
m_CurrentFrameReferencesData.ReferenceTextures.pSubresources.end(),
[](UINT i) { return i == 0; }));
retVal.pSubresources = isAoT ? nullptr : m_CurrentFrameReferencesData.ReferenceTextures.pSubresources.data();
}
return retVal;
}
void
d3d12_video_encoder_references_manager_hevc::prepare_current_frame_recon_pic_allocation()
static const char *
d3d12_video_encoder_friendly_frame_type_hevc(D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC picType)
{
m_CurrentFrameReferencesData.ReconstructedPicTexture = { nullptr, 0 };
// If all GOP are intra frames, no point in doing reference pic allocations
if (is_current_frame_used_as_reference() && m_gopHasInterFrames) {
auto reconPic = m_rDPBStorageManager.get_new_tracked_picture_allocation();
m_CurrentFrameReferencesData.ReconstructedPicTexture.pReconstructedPicture = reconPic.pReconstructedPicture;
m_CurrentFrameReferencesData.ReconstructedPicTexture.ReconstructedPictureSubresource =
reconPic.ReconstructedPictureSubresource;
}
}
void
d3d12_video_encoder_references_manager_hevc::update_fifo_dpb_push_front_cur_recon_pic()
{
// Keep the order of the dpb storage and dpb descriptors in a circular buffer
// order such that the DPB array consists of a sequence of frames in DECREASING encoding order
// eg. last frame encoded at first, followed by one to last frames encoded, and at the end
// the most distant frame encoded (currentFrameEncodeOrderNumber - MaxDPBSize)
// If current pic was not used as reference, current reconstructed picture resource is empty,
// No need to do anything in that case.
// Otherwise extract the reconstructed picture result and add it to the DPB
// If GOP are all intra frames, do nothing also.
if (is_current_frame_used_as_reference() && m_gopHasInterFrames) {
debug_printf("[D3D12 Video Encoder Picture Manager HEVC] MaxDPBCapacity is %d - Number of pics in DPB is %d "
"when trying to put frame with POC %d (frame_num %d) at front of the DPB\n",
m_MaxDPBCapacity,
m_rDPBStorageManager.get_number_of_pics_in_dpb(),
m_curFrameState.PictureOrderCountNumber,
m_current_frame_idx);
// Release least recently used in DPB if we filled the m_MaxDPBCapacity allowed
if (m_rDPBStorageManager.get_number_of_pics_in_dpb() == m_MaxDPBCapacity) {
bool untrackedRes = false;
m_rDPBStorageManager.remove_reference_frame(m_rDPBStorageManager.get_number_of_pics_in_dpb() - 1,
&untrackedRes); // Remove last entry
// Verify that resource was untracked since this class is using the pool completely for allocations
assert(untrackedRes);
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.pop_back(); // Remove last entry
}
// Add new dpb to front of DPB
D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE recAlloc = get_current_frame_recon_pic_output_allocation();
d3d12_video_reconstructed_picture refFrameDesc = {};
refFrameDesc.pReconstructedPicture = recAlloc.pReconstructedPicture;
refFrameDesc.ReconstructedPictureSubresource = recAlloc.ReconstructedPictureSubresource;
refFrameDesc.pVideoHeap = nullptr; // D3D12 Video Encode does not need the D3D12VideoEncoderHeap struct for HEVC
// (used for no-key-frame resolution change in VC1, AV1, etc)
m_rDPBStorageManager.insert_reference_frame(refFrameDesc, 0);
// Prepare D3D12_VIDEO_ENCODER_REFERENCE_PICTURE_DESCRIPTOR_HEVC_EX for added DPB member
D3D12_VIDEO_ENCODER_REFERENCE_PICTURE_DESCRIPTOR_HEVC_EX newDPBDescriptor =
switch (picType) {
case D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_P_FRAME:
{
// D3D12_VIDEO_ENCODER_REFERENCE_PICTURE_DESCRIPTOR_HEVC
{
// UINT ReconstructedPictureResourceIndex;
0, // the associated reconstructed picture is also being pushed_front in m_rDPBStorageManager
// BOOL IsRefUsedByCurrentPic;
false, // usage is determined in method AdvanceFrame according to picture type to be encoded
// BOOL IsLongTermReference
false, // not implemented - neither FFMPEG or GSTREAMER use LTR for HEVC VAAPI
// UINT PictureOrderCountNumber;
m_curFrameState.PictureOrderCountNumber,
// UINT TemporalLayerIndex;
0
},
// reference_lists_frame_idx
m_current_frame_idx,
};
// Add DPB entry
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.insert(
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.begin(),
newDPBDescriptor);
// Update the indices for ReconstructedPictureResourceIndex in pReferenceFramesReconPictureDescriptors
// to be in identity mapping with m_rDPBStorageManager indices
// after pushing the elements to the right in the push_front operation
for (uint32_t dpbResIdx = 1;
dpbResIdx < m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.size();
dpbResIdx++) {
auto &dpbDesc = m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[dpbResIdx];
dpbDesc.base.ReconstructedPictureResourceIndex = dpbResIdx;
}
return "HEVC_P_FRAME";
} break;
case D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_B_FRAME:
{
return "HEVC_B_FRAME";
} break;
case D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_I_FRAME:
{
return "HEVC_I_FRAME";
} break;
case D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_IDR_FRAME:
{
return "HEVC_IDR_FRAME";
} break;
default:
{
unreachable("Unsupported D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC");
} break;
}
// Number of allocations, disregarding if they are used or not, should not exceed this limit due to reuse policies on
// DPB items removal.
assert(m_rDPBStorageManager.get_number_of_tracked_allocations() <= (m_MaxDPBCapacity + 1));
}
void
@ -328,6 +103,15 @@ d3d12_video_encoder_references_manager_hevc::print_l0_l1_lists()
if ((D3D12_DEBUG_VERBOSE & d3d12_debug) &&
((m_curFrameState.FrameType == D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_P_FRAME) ||
(m_curFrameState.FrameType == D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_B_FRAME))) {
debug_printf(
"[D3D12 Video Encoder Picture Manager HEVC] L0 (%d entries) and L1 (%d entries) lists for frame with POC "
"%d and frame_type %s are:\n",
m_curFrameState.List0ReferenceFramesCount,
m_curFrameState.List1ReferenceFramesCount,
m_curFrameState.PictureOrderCountNumber,
d3d12_video_encoder_friendly_frame_type_hevc(m_curFrameState.FrameType));
std::string list0ContentsString;
for (uint32_t idx = 0; idx < m_curFrameState.List0ReferenceFramesCount; idx++) {
uint32_t value = m_curFrameState.pList0ReferenceFrames[idx];
@ -335,24 +119,27 @@ d3d12_video_encoder_references_manager_hevc::print_l0_l1_lists()
list0ContentsString += std::to_string(value);
list0ContentsString += " - POC: ";
list0ContentsString += std::to_string(
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[value].base.PictureOrderCountNumber);
list0ContentsString += " - IsRefUsedByCurrentPic: ";
list0ContentsString += std::to_string(
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[value].base.IsRefUsedByCurrentPic);
list0ContentsString += " - IsLongTermReference: ";
list0ContentsString += std::to_string(
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[value].base.IsLongTermReference);
list0ContentsString += " - reference_lists_frame_idx: ";
list0ContentsString += std::to_string(
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[value].reference_lists_frame_idx);
list0ContentsString += "}\n";
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[value].PictureOrderCountNumber);
list0ContentsString += " }\n";
}
debug_printf(
"[D3D12 Video Encoder Picture Manager HEVC] L0 list for frame with POC %d (frame_num %d) is: \n %s \n",
m_curFrameState.PictureOrderCountNumber,
m_current_frame_idx,
list0ContentsString.c_str());
debug_printf("[D3D12 Video Encoder Picture Manager HEVC] L0 list (%d entries) for frame with POC %d is: \n%s \n",
m_curFrameState.List0ReferenceFramesCount,
m_curFrameState.PictureOrderCountNumber,
list0ContentsString.c_str());
std::string modificationOrderList0ContentsString;
for (uint32_t idx = 0; idx < m_curFrameState.List0RefPicModificationsCount; idx++) {
modificationOrderList0ContentsString += "{ ";
modificationOrderList0ContentsString += std::to_string(m_curFrameState.pList0RefPicModifications[idx]);
modificationOrderList0ContentsString += " }\n";
}
debug_printf("[D3D12 Video Encoder Picture Manager HEVC] L0 modification list (%d entries) for frame with POC %d "
" - temporal_id (%d) is: \n%s \n",
m_curFrameState.List0RefPicModificationsCount,
m_curFrameState.PictureOrderCountNumber,
m_curFrameState.TemporalLayerIndex,
modificationOrderList0ContentsString.c_str());
std::string list1ContentsString;
for (uint32_t idx = 0; idx < m_curFrameState.List1ReferenceFramesCount; idx++) {
@ -361,24 +148,28 @@ d3d12_video_encoder_references_manager_hevc::print_l0_l1_lists()
list1ContentsString += std::to_string(value);
list1ContentsString += " - POC: ";
list1ContentsString += std::to_string(
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[value].base.PictureOrderCountNumber);
list1ContentsString += " - IsRefUsedByCurrentPic: ";
list1ContentsString += std::to_string(
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[value].base.IsRefUsedByCurrentPic);
list1ContentsString += " - IsLongTermReference: ";
list1ContentsString += std::to_string(
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[value].base.IsLongTermReference);
list1ContentsString += " - reference_lists_frame_idx: ";
list1ContentsString += std::to_string(
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[value].reference_lists_frame_idx);
list1ContentsString += "}\n";
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[value].PictureOrderCountNumber);
list1ContentsString += " }\n";
}
debug_printf(
"[D3D12 Video Encoder Picture Manager HEVC] L1 list for frame with POC %d (frame_num %d) is: \n %s \n",
m_curFrameState.PictureOrderCountNumber,
m_current_frame_idx,
list1ContentsString.c_str());
debug_printf("[D3D12 Video Encoder Picture Manager HEVC] L1 list (%d entries) for frame with POC %d is: \n%s \n",
m_curFrameState.List1ReferenceFramesCount,
m_curFrameState.PictureOrderCountNumber,
list1ContentsString.c_str());
std::string modificationOrderList1ContentsString;
for (uint32_t idx = 0; idx < m_curFrameState.List1RefPicModificationsCount; idx++) {
modificationOrderList1ContentsString += "{ ";
modificationOrderList1ContentsString += std::to_string(m_curFrameState.pList1RefPicModifications[idx]);
modificationOrderList1ContentsString += " }\n";
}
debug_printf("[D3D12 Video Encoder Picture Manager HEVC] L1 modification list (%d entries) for frame with POC %d "
"- temporal_id (%d) is: \n%s \n",
m_curFrameState.List1RefPicModificationsCount,
m_curFrameState.PictureOrderCountNumber,
m_curFrameState.TemporalLayerIndex,
modificationOrderList1ContentsString.c_str());
}
}
@ -391,82 +182,202 @@ d3d12_video_encoder_references_manager_hevc::print_dpb()
dpbResIdx < m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.size();
dpbResIdx++) {
auto &dpbDesc = m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[dpbResIdx];
auto dpbEntry = m_rDPBStorageManager.get_reference_frame(dpbDesc.base.ReconstructedPictureResourceIndex);
dpbContents += "{ DPBidx: ";
dpbContents += std::to_string(dpbResIdx);
dpbContents += " - POC: ";
dpbContents += std::to_string(dpbDesc.base.PictureOrderCountNumber);
dpbContents += std::to_string(dpbDesc.PictureOrderCountNumber);
dpbContents += " - IsRefUsedByCurrentPic: ";
dpbContents += std::to_string(dpbDesc.base.IsRefUsedByCurrentPic);
dpbContents += std::to_string(dpbDesc.IsRefUsedByCurrentPic);
dpbContents += " - IsLongTermReference: ";
dpbContents += std::to_string(dpbDesc.IsLongTermReference);
dpbContents += " - TemporalLayerIndex: ";
dpbContents += std::to_string(dpbDesc.TemporalLayerIndex);
dpbContents += " - DPBStorageIdx: ";
dpbContents += std::to_string(dpbDesc.base.ReconstructedPictureResourceIndex);
dpbContents += " - reference_lists_frame_idx: ";
dpbContents += std::to_string(dpbDesc.reference_lists_frame_idx);
dpbContents += std::to_string(dpbDesc.ReconstructedPictureResourceIndex);
dpbContents += " - DPBStorageResourcePtr: ";
char strBuf[256];
memset(&strBuf, '\0', 256);
sprintf(strBuf, "%p", dpbEntry.pReconstructedPicture);
sprintf(strBuf,
"%p",
m_CurrentFrameReferencesData.ReferenceTextures.pResources[dpbDesc.ReconstructedPictureResourceIndex]);
dpbContents += std::string(strBuf);
dpbContents += " - DPBStorageSubresource: ";
dpbContents += std::to_string(dpbEntry.ReconstructedPictureSubresource);
dpbContents += std::to_string(
m_CurrentFrameReferencesData.ReferenceTextures.pSubresources[dpbDesc.ReconstructedPictureResourceIndex]);
if (dpbDesc.PictureOrderCountNumber == m_curFrameState.PictureOrderCountNumber) {
dpbContents += " - CURRENT FRAME RECON PIC ";
}
dpbContents += "}\n";
}
debug_printf("[D3D12 Video Encoder Picture Manager HEVC] DPB has %d frames - DPB references for frame with POC "
"%d are: \n %s \n",
m_rDPBStorageManager.get_number_of_pics_in_dpb(),
m_curFrameState.PictureOrderCountNumber,
dpbContents.c_str());
"%d and frame_type %s are: \n%s \n",
static_cast<UINT>(m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.size()),
m_curFrameState.PictureOrderCountNumber,
d3d12_video_encoder_friendly_frame_type_hevc(m_curFrameState.FrameType),
dpbContents.c_str());
}
}
// Advances state to next frame in GOP; subsequent calls to GetCurrentFrame* point to the advanced frame status
void
d3d12_video_encoder_references_manager_hevc::end_frame()
static D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC
d3d12_video_encoder_convert_frame_type_hevc(enum pipe_h2645_enc_picture_type picType)
{
debug_printf("[D3D12 Video Encoder Picture Manager HEVC] %d resources IN USE out of a total of %d ALLOCATED "
"resources at end_frame for frame with POC: %d\n",
m_rDPBStorageManager.get_number_of_in_use_allocations(),
m_rDPBStorageManager.get_number_of_tracked_allocations(),
m_curFrameState.PictureOrderCountNumber);
// Adds last used (if not null) get_current_frame_recon_pic_output_allocation to DPB for next EncodeFrame if
// necessary updates pReferenceFramesReconPictureDescriptors and updates the dpb storage
update_fifo_dpb_push_front_cur_recon_pic();
}
bool
d3d12_video_encoder_references_manager_hevc::is_current_frame_used_as_reference()
{
return m_isCurrentFrameUsedAsReference;
switch (picType) {
case PIPE_H2645_ENC_PICTURE_TYPE_P:
{
return D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_P_FRAME;
} break;
case PIPE_H2645_ENC_PICTURE_TYPE_B:
{
return D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_B_FRAME;
} break;
case PIPE_H2645_ENC_PICTURE_TYPE_I:
{
return D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_I_FRAME;
} break;
case PIPE_H2645_ENC_PICTURE_TYPE_IDR:
{
return D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_IDR_FRAME;
} break;
default:
{
unreachable("Unsupported pipe_h2645_enc_picture_type");
} break;
}
}
void
d3d12_video_encoder_references_manager_hevc::begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
bool bUsedAsReference, struct pipe_picture_desc* picture)
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
pipe_h265_enc_picture_desc * pPipeDesc = (pipe_h265_enc_picture_desc*) picture;
m_curFrameState = *curFrameData.pHEVCPicData;
m_isCurrentFrameUsedAsReference = bUsedAsReference;
m_current_frame_idx = pPipeDesc->frame_num;
debug_printf("Marking POC %d (frame_num %d) as reference ? %d\n",
curFrameData.pHEVCPicData->PictureOrderCountNumber,
m_current_frame_idx,
bUsedAsReference);
// Advance the GOP tracking state
bool isDPBFlushNeeded = (m_curFrameState.FrameType == D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_IDR_FRAME);
if (isDPBFlushNeeded) {
reset_gop_tracking_and_dpb();
} else {
// Get new allocation from DPB storage for reconstructed picture
// This is only necessary for the frames that come after an IDR
// since in the initial state already has this initialized
// and re-initialized by reset_gop_tracking_and_dpb above
struct pipe_h265_enc_picture_desc *hevcPic = (struct pipe_h265_enc_picture_desc *) picture;
prepare_current_frame_recon_pic_allocation();
///
/// Copy DPB snapshot from pipe params
///
m_curFrameState.ReferenceFramesReconPictureDescriptorsCount =
static_cast<uint32_t>(m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.size());
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.resize(hevcPic->dpb_size);
m_CurrentFrameReferencesData.ReferenceTextures.pResources.resize(hevcPic->dpb_size);
m_CurrentFrameReferencesData.ReferenceTextures.pSubresources.resize(hevcPic->dpb_size);
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.resize(hevcPic->dpb_size);
for (uint8_t i = 0; i < hevcPic->dpb_size; i++) {
//
// Set entry DPB members
//
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[i].IsLongTermReference =
hevcPic->dpb[i].is_ltr;
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[i].PictureOrderCountNumber =
hevcPic->dpb[i].pic_order_cnt;
// mirror indices between DPB entries and allocation arrays
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[i].ReconstructedPictureResourceIndex = i;
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[i].TemporalLayerIndex =
0u; // hevcPic->dpb[i].temporal_id;
// Check if this i-th dpb descriptor entry is referenced by any entry in L0 or L1 lists
// and set IsRefUsedByCurrentPic accordingly
auto endItL0 = hevcPic->ref_list0 + (hevcPic->num_ref_idx_l0_active_minus1 + 1);
bool bReferencesFromL0 = std::find(hevcPic->ref_list0, endItL0, i) != endItL0;
auto endItL1 = hevcPic->ref_list1 + (hevcPic->num_ref_idx_l1_active_minus1 + 1);
bool bReferencesFromL1 = std::find(hevcPic->ref_list1, endItL1, i) != endItL1;
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors[i].IsRefUsedByCurrentPic =
bReferencesFromL0 || bReferencesFromL1;
//
// Set texture allocations
//
struct d3d12_video_buffer *vidbuf = (struct d3d12_video_buffer *) hevcPic->dpb[i].buffer;
m_CurrentFrameReferencesData.ReferenceTextures.pResources[i] = d3d12_resource_resource(vidbuf->texture);
m_CurrentFrameReferencesData.ReferenceTextures.pSubresources[i] = vidbuf->idx_texarray_slots;
if (hevcPic->dpb[i].pic_order_cnt == hevcPic->pic_order_cnt) {
m_CurrentFrameReferencesData.ReconstructedPicTexture.pReconstructedPicture =
m_CurrentFrameReferencesData.ReferenceTextures.pResources[i];
m_CurrentFrameReferencesData.ReconstructedPicTexture.ReconstructedPictureSubresource =
m_CurrentFrameReferencesData.ReferenceTextures.pSubresources[i];
}
}
///
/// Set pic control info
///
m_curFrameState.FrameType = d3d12_video_encoder_convert_frame_type_hevc(hevcPic->picture_type);
m_curFrameState.PictureOrderCountNumber = hevcPic->pic_order_cnt;
m_curFrameState.TemporalLayerIndex = 0u; // hevcPic->temporal_id;
///
/// Set reference pics info
///
m_curFrameState.List0ReferenceFramesCount = 0;
m_curFrameState.pList0ReferenceFrames = nullptr;
m_curFrameState.List0RefPicModificationsCount = 0;
m_curFrameState.pList0RefPicModifications = nullptr;
m_curFrameState.List1ReferenceFramesCount = 0;
m_curFrameState.pList1ReferenceFrames = nullptr;
m_curFrameState.List1RefPicModificationsCount = 0;
m_curFrameState.pList1RefPicModifications = nullptr;
m_curFrameState.ReferenceFramesReconPictureDescriptorsCount = 0u;
m_curFrameState.pReferenceFramesReconPictureDescriptors = nullptr;
if ((m_curFrameState.FrameType == D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_P_FRAME) ||
(m_curFrameState.FrameType == D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_B_FRAME)) {
// Set DPB descriptors
m_curFrameState.ReferenceFramesReconPictureDescriptorsCount =
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.size();
m_curFrameState.pReferenceFramesReconPictureDescriptors =
m_CurrentFrameReferencesData.pReferenceFramesReconPictureDescriptors.data();
// Deep Copy L0 list
m_curFrameState.List0ReferenceFramesCount = hevcPic->num_ref_idx_l0_active_minus1 + 1;
m_CurrentFrameReferencesData.pList0ReferenceFrames.resize(m_curFrameState.List0ReferenceFramesCount);
for (unsigned i = 0; i < m_curFrameState.List0ReferenceFramesCount; i++)
m_CurrentFrameReferencesData.pList0ReferenceFrames[i] = hevcPic->ref_list0[i];
m_curFrameState.pList0ReferenceFrames = m_CurrentFrameReferencesData.pList0ReferenceFrames.data();
// Deep Copy L0 ref modification list
if (hevcPic->slice.ref_pic_lists_modification.ref_pic_list_modification_flag_l0) {
m_curFrameState.List0RefPicModificationsCount = hevcPic->num_ref_idx_l0_active_minus1 + 1;
m_CurrentFrameReferencesData.pList0RefPicModifications.resize(m_curFrameState.List0RefPicModificationsCount);
for (unsigned i = 0; i < m_curFrameState.List0RefPicModificationsCount; i++)
m_CurrentFrameReferencesData.pList0RefPicModifications[i] =
hevcPic->slice.ref_pic_lists_modification.list_entry_l0[i];
m_curFrameState.pList0RefPicModifications = m_CurrentFrameReferencesData.pList0RefPicModifications.data();
}
}
if (m_curFrameState.FrameType == D3D12_VIDEO_ENCODER_FRAME_TYPE_HEVC_B_FRAME) {
// Deep Copy L1 list
m_curFrameState.List1ReferenceFramesCount = hevcPic->num_ref_idx_l1_active_minus1 + 1;
m_CurrentFrameReferencesData.pList1ReferenceFrames.resize(m_curFrameState.List1ReferenceFramesCount);
for (unsigned i = 0; i < m_curFrameState.List1ReferenceFramesCount; i++)
m_CurrentFrameReferencesData.pList1ReferenceFrames[i] = hevcPic->ref_list1[i];
m_curFrameState.pList1ReferenceFrames = m_CurrentFrameReferencesData.pList1ReferenceFrames.data();
// Deep Copy L1 ref modification list
if (hevcPic->slice.ref_pic_lists_modification.ref_pic_list_modification_flag_l1) {
m_curFrameState.List1RefPicModificationsCount = hevcPic->num_ref_idx_l1_active_minus1 + 1;
m_CurrentFrameReferencesData.pList1RefPicModifications.resize(m_curFrameState.List1RefPicModificationsCount);
for (unsigned i = 0; i < m_curFrameState.List1RefPicModificationsCount; i++)
m_CurrentFrameReferencesData.pList1RefPicModifications[i] =
hevcPic->slice.ref_pic_lists_modification.list_entry_l1[i];
m_curFrameState.pList1RefPicModifications = m_CurrentFrameReferencesData.pList1RefPicModifications.data();
}
}
print_dpb();
print_l0_l1_lists();
}

View file

@ -21,67 +21,66 @@
* IN THE SOFTWARE.
*/
#ifndef D3D12_VIDEO_ENCODE_FIFO_REFERENCES_MANAGER_HEVC_H
#define D3D12_VIDEO_ENCODE_FIFO_REFERENCES_MANAGER_HEVC_H
#ifndef D3D12_VIDEO_ENCODE_REFERENCES_MANAGER_HEVC_H
#define D3D12_VIDEO_ENCODE_REFERENCES_MANAGER_HEVC_H
#include "d3d12_video_types.h"
#include "d3d12_video_encoder_references_manager.h"
#include "d3d12_video_dpb_storage_manager.h"
class d3d12_video_encoder_references_manager_hevc : public d3d12_video_encoder_references_manager_interface
{
public:
void end_frame();
void begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData, bool bUsedAsReference, struct pipe_picture_desc* picture);
D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE get_current_frame_recon_pic_output_allocation();
void begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture);
bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation);
bool is_current_frame_used_as_reference();
D3D12_VIDEO_ENCODE_REFERENCE_FRAMES get_current_reference_frames();
d3d12_video_encoder_references_manager_hevc(bool gopHasInterCodedFrames,
d3d12_video_dpb_storage_manager_interface &rDpbStorageManager,
uint32_t MaxDPBCapacity);
bool is_current_frame_used_as_reference()
{
return m_isCurrentFrameUsedAsReference;
}
D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE get_current_frame_recon_pic_output_allocation()
{
return m_CurrentFrameReferencesData.ReconstructedPicTexture;
}
void end_frame()
{ }
d3d12_video_encoder_references_manager_hevc()
{ }
~d3d12_video_encoder_references_manager_hevc()
{ }
private:
// Class helpers
void prepare_current_frame_recon_pic_allocation();
void reset_gop_tracking_and_dpb();
void update_fifo_dpb_push_front_cur_recon_pic();
void print_dpb();
void print_l0_l1_lists();
// Class members
uint32_t m_MaxDPBCapacity = 0;
struct D3D12_VIDEO_ENCODER_REFERENCE_PICTURE_DESCRIPTOR_HEVC_EX {
D3D12_VIDEO_ENCODER_REFERENCE_PICTURE_DESCRIPTOR_HEVC base;
/* the upper layer uses pipe_h265_enc_picture_desc.frame_num to identify frames
in the L0 and L1 reference lists. This frame_num is different than POC
so let's save it in this variable to be able to reverse-map the L0/L1 lists from
these indices into POCs */
unsigned int reference_lists_frame_idx;
};
struct d3d12_video_dpb
{
std::vector<ID3D12Resource *> pResources;
std::vector<uint32_t> pSubresources;
};
struct current_frame_references_data
{
std::vector<D3D12_VIDEO_ENCODER_REFERENCE_PICTURE_DESCRIPTOR_HEVC_EX> pReferenceFramesReconPictureDescriptors;
D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE ReconstructedPicTexture;
std::vector<D3D12_VIDEO_ENCODER_REFERENCE_PICTURE_DESCRIPTOR_HEVC> pReferenceFramesReconPictureDescriptors;
D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE ReconstructedPicTexture;
d3d12_video_dpb ReferenceTextures;
std::vector<UINT> pList0ReferenceFrames;
std::vector<UINT> pList1ReferenceFrames;
std::vector<UINT> pList0RefPicModifications;
std::vector<UINT> pList1RefPicModifications;
};
d3d12_video_dpb_storage_manager_interface &m_rDPBStorageManager;
current_frame_references_data m_CurrentFrameReferencesData;
bool m_gopHasInterFrames = false;
bool m_isCurrentFrameUsedAsReference = false;
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC m_curFrameState = {};
std::vector<D3D12_VIDEO_ENCODER_REFERENCE_PICTURE_DESCRIPTOR_HEVC> m_curFrameStateDescriptorStorage;
unsigned int m_current_frame_idx = 0;
};
#endif