mirror of
https://gitlab.freedesktop.org/mesa/mesa.git
synced 2025-12-24 19:40:10 +01:00
d3d12: Remove Agility v717 guards for features now available in v618
Reviewed-by: Jesse Natalie <jenatali@microsoft.com> Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/37581>
This commit is contained in:
parent
0e73c6470e
commit
b06b2fbaba
23 changed files with 272 additions and 654 deletions
|
|
@ -299,10 +299,8 @@ struct d3d12_context {
|
|||
void *stencil_resolve_vs, *stencil_resolve_fs, *stencil_resolve_fs_no_flip, *sampler_state;
|
||||
#endif // HAVE_GALLIUM_D3D12_GRAPHICS
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
struct d3d12_context_queue_priority_manager* priority_manager; // Object passed and managed by frontend
|
||||
mtx_t priority_manager_lock; // Mutex to protect access to priority_manager
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
uint32_t max_video_encoding_async_depth = 0u;
|
||||
};
|
||||
|
|
@ -332,10 +330,8 @@ d3d12_current_batch(struct d3d12_context *ctx)
|
|||
struct pipe_context *
|
||||
d3d12_context_create(struct pipe_screen *pscreen, void *priv, unsigned flags);
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
int
|
||||
d3d12_context_set_queue_priority_manager(struct pipe_context *ctx, struct d3d12_context_queue_priority_manager *priority_manager);
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
int
|
||||
d3d12_video_encoder_set_max_async_queue_depth(struct pipe_context *ctx, uint32_t max_async_depth);
|
||||
|
|
|
|||
|
|
@ -67,8 +67,6 @@ d3d12_context_destroy(struct pipe_context *pctx)
|
|||
{
|
||||
struct d3d12_context *ctx = d3d12_context(pctx);
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
if (ctx->priority_manager)
|
||||
{
|
||||
struct d3d12_screen *screen = d3d12_screen(pctx->screen);
|
||||
|
|
@ -81,8 +79,6 @@ d3d12_context_destroy(struct pipe_context *pctx)
|
|||
mtx_destroy(&ctx->priority_manager_lock);
|
||||
}
|
||||
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
struct d3d12_screen *screen = d3d12_screen(pctx->screen);
|
||||
mtx_lock(&screen->submit_mutex);
|
||||
list_del(&ctx->context_list_entry);
|
||||
|
|
@ -356,8 +352,6 @@ d3d12_video_create_codec(struct pipe_context *context,
|
|||
}
|
||||
#endif
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
int
|
||||
d3d12_context_set_queue_priority(struct d3d12_context_queue_priority_manager* manager,
|
||||
ID3D12CommandQueue *d3d12_queue,
|
||||
|
|
@ -371,7 +365,7 @@ d3d12_context_set_queue_priority(struct d3d12_context_queue_priority_manager* ma
|
|||
mtx_lock(&ctx12->priority_manager_lock);
|
||||
{
|
||||
// Set the queue priority
|
||||
ComPtr<ID3D12CommandQueueDynamicPriorityPreview> prio_iface;
|
||||
ComPtr<ID3D12CommandQueue1> prio_iface;
|
||||
if(FAILED(d3d12_queue->QueryInterface(IID_PPV_ARGS(&prio_iface))))
|
||||
{
|
||||
mtx_unlock(&ctx12->priority_manager_lock);
|
||||
|
|
@ -407,7 +401,7 @@ d3d12_context_get_queue_priority(struct d3d12_context_queue_priority_manager* ma
|
|||
|
||||
mtx_lock(&ctx12->priority_manager_lock);
|
||||
{
|
||||
ComPtr<ID3D12CommandQueueDynamicPriorityPreview> prio_iface;
|
||||
ComPtr<ID3D12CommandQueue1> prio_iface;
|
||||
if (FAILED(d3d12_queue->QueryInterface(IID_PPV_ARGS(&prio_iface))))
|
||||
{
|
||||
mtx_unlock(&ctx12->priority_manager_lock);
|
||||
|
|
@ -479,8 +473,6 @@ d3d12_context_set_queue_priority_manager(struct pipe_context *ctx, struct d3d12_
|
|||
return 0;
|
||||
}
|
||||
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
int
|
||||
d3d12_video_encoder_set_max_async_queue_depth(struct pipe_context *ctx, uint32_t max_async_depth)
|
||||
{
|
||||
|
|
|
|||
|
|
@ -45,8 +45,6 @@ struct d3d12_interop_resource_info {
|
|||
uint64_t buffer_offset;
|
||||
};
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
/*
|
||||
* Structure that contains information about scheduling priority management
|
||||
* for GPU workloads exposed through work queues.
|
||||
|
|
@ -193,8 +191,6 @@ struct d3d12_interop_device_info1 {
|
|||
int (*set_video_encoder_max_async_queue_depth)(struct pipe_context *context, uint32_t max_async_queue_depth);
|
||||
};
|
||||
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
|
|
|||
|
|
@ -1158,14 +1158,12 @@ d3d12_interop_query_device_info(struct pipe_screen *pscreen, uint32_t data_size,
|
|||
info->device = screen->dev;
|
||||
info->queue = screen->cmdqueue;
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
if (data_size >= sizeof(d3d12_interop_device_info1)) {
|
||||
d3d12_interop_device_info1 *info1 = (d3d12_interop_device_info1 *)data;
|
||||
info1->set_context_queue_priority_manager = d3d12_context_set_queue_priority_manager;
|
||||
info1->set_video_encoder_max_async_queue_depth = d3d12_video_encoder_set_max_async_queue_depth;
|
||||
return sizeof(*info1);
|
||||
}
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
return sizeof(*info);
|
||||
}
|
||||
|
|
@ -1614,11 +1612,9 @@ d3d12_init_screen(struct d3d12_screen *screen, IUnknown *adapter)
|
|||
}
|
||||
#endif // HAVE_GALLIUM_D3D12_GRAPHICS
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
if (d3d12_init_screen_command_queue(screen, D3D12_COMMAND_QUEUE_FLAG_ALLOW_DYNAMIC_PRIORITY)) {
|
||||
screen->supports_dynamic_queue_priority = true;
|
||||
} else
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
{
|
||||
screen->supports_dynamic_queue_priority = false;
|
||||
if (!d3d12_init_screen_command_queue(screen, D3D12_COMMAND_QUEUE_FLAG_NONE)) {
|
||||
|
|
|
|||
|
|
@ -132,8 +132,6 @@ d3d12_video_create_decoder(struct pipe_context *context, const struct pipe_video
|
|||
goto failed;
|
||||
}
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
if (pD3D12Ctx->priority_manager)
|
||||
{
|
||||
// Register queue with priority manager
|
||||
|
|
@ -145,8 +143,6 @@ d3d12_video_create_decoder(struct pipe_context *context, const struct pipe_video
|
|||
}
|
||||
}
|
||||
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
return &pD3D12Dec->base;
|
||||
|
||||
failed:
|
||||
|
|
@ -197,8 +193,6 @@ d3d12_video_decoder_destroy(struct pipe_video_codec *codec)
|
|||
|
||||
// No need for m_pD3D12Screen as it is not managed by d3d12_video_decoder
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
struct d3d12_context* ctx = d3d12_context(pD3D12Dec->base.context);
|
||||
if (ctx->priority_manager)
|
||||
{
|
||||
|
|
@ -209,8 +203,6 @@ d3d12_video_decoder_destroy(struct pipe_video_codec *codec)
|
|||
}
|
||||
}
|
||||
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
// Call dtor to make ComPtr work
|
||||
delete pD3D12Dec;
|
||||
}
|
||||
|
|
@ -854,10 +846,8 @@ d3d12_video_decoder_create_command_objects(const struct d3d12_screen *pD3D12Scre
|
|||
assert(pD3D12Dec->m_spD3D12VideoDevice);
|
||||
|
||||
D3D12_COMMAND_QUEUE_DESC commandQueueDesc = { D3D12_COMMAND_LIST_TYPE_VIDEO_DECODE };
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
if (pD3D12Screen->supports_dynamic_queue_priority)
|
||||
commandQueueDesc.Flags |= D3D12_COMMAND_QUEUE_FLAG_ALLOW_DYNAMIC_PRIORITY;
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
HRESULT hr = pD3D12Screen->dev->CreateCommandQueue(&commandQueueDesc,
|
||||
IID_PPV_ARGS(pD3D12Dec->m_spDecodeCommandQueue.GetAddressOf()));
|
||||
|
|
|
|||
|
|
@ -258,8 +258,6 @@ d3d12_video_encoder_destroy(struct pipe_video_codec *codec)
|
|||
if (pD3D12Enc->m_SliceHeaderRepackBuffer)
|
||||
pD3D12Enc->m_screen->resource_destroy(pD3D12Enc->m_screen, pD3D12Enc->m_SliceHeaderRepackBuffer);
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
struct d3d12_context* ctx = d3d12_context(pD3D12Enc->base.context);
|
||||
if (ctx->priority_manager)
|
||||
{
|
||||
|
|
@ -270,8 +268,6 @@ d3d12_video_encoder_destroy(struct pipe_video_codec *codec)
|
|||
}
|
||||
}
|
||||
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
// Call d3d12_video_encoder dtor to make ComPtr and other member's destructors work
|
||||
delete pD3D12Enc;
|
||||
}
|
||||
|
|
@ -303,7 +299,6 @@ d3d12_video_encoder_friendly_frame_type_h264(D3D12_VIDEO_ENCODER_FRAME_TYPE_H264
|
|||
}
|
||||
}
|
||||
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
static D3D12_VIDEO_ENCODER_FRAME_INPUT_MOTION_UNIT_PRECISION
|
||||
d3d12_video_encoder_convert_move_precision(enum pipe_enc_move_info_precision_unit precision)
|
||||
{
|
||||
|
|
@ -328,14 +323,12 @@ d3d12_video_encoder_convert_move_precision(enum pipe_enc_move_info_precision_uni
|
|||
} break;
|
||||
}
|
||||
}
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
|
||||
void
|
||||
d3d12_video_encoder_update_move_rects(struct d3d12_video_encoder *pD3D12Enc,
|
||||
const struct pipe_enc_move_info& rects)
|
||||
{
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
memset(&pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc, 0, sizeof(pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc));
|
||||
pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc = {};
|
||||
pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapSource = rects.input_mode == PIPE_ENC_MOVE_INFO_INPUT_MODE_RECTS ?
|
||||
D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_CPU_BUFFER : D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE;
|
||||
|
||||
|
|
@ -384,14 +377,11 @@ d3d12_video_encoder_update_move_rects(struct d3d12_video_encoder *pD3D12Enc,
|
|||
pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapInfo.MotionUnitPrecision = d3d12_video_encoder_convert_move_precision(rects.precision);
|
||||
// pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapInfo.PictureControlConfiguration is set later as not all the params are ready at this stage
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
static void d3d12_video_encoder_is_gpu_qmap_input_feature_enabled(struct d3d12_video_encoder* pD3D12Enc, BOOL& isEnabled, D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE &outMapSourceEnabled)
|
||||
{
|
||||
isEnabled = FALSE;
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
//
|
||||
// Prefer GPU QP Map over CPU QP Delta Map if both are enabled
|
||||
//
|
||||
|
|
@ -409,9 +399,7 @@ static void d3d12_video_encoder_is_gpu_qmap_input_feature_enabled(struct d3d12_v
|
|||
outMapSourceEnabled = D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE;
|
||||
assert(!pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.AppRequested); // When enabling GPU QP Map, CPU QP Delta must be disabled
|
||||
}
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
}
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
|
||||
void
|
||||
d3d12_video_encoder_update_qpmap_input(struct d3d12_video_encoder *pD3D12Enc,
|
||||
|
|
@ -419,11 +407,10 @@ d3d12_video_encoder_update_qpmap_input(struct d3d12_video_encoder *pD3D12Enc,
|
|||
struct pipe_enc_roi roi,
|
||||
uint32_t temporal_id)
|
||||
{
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
//
|
||||
// Clear QPDelta context for this frame
|
||||
//
|
||||
memset(&pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc, 0, sizeof(pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc));
|
||||
pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc = {};
|
||||
pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[temporal_id].m_Flags &= ~D3D12_VIDEO_ENCODER_RATE_CONTROL_FLAG_ENABLE_DELTA_QP;
|
||||
|
||||
//
|
||||
|
|
@ -475,7 +462,6 @@ d3d12_video_encoder_update_qpmap_input(struct d3d12_video_encoder *pD3D12Enc,
|
|||
pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInputBuffer.m_p_qp_map_cpu);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void
|
||||
|
|
@ -501,7 +487,6 @@ d3d12_video_encoder_update_rate_control_saq(struct d3d12_video_encoder *pD3D12En
|
|||
void d3d12_video_encoder_initialize_two_pass(struct d3d12_video_encoder *pD3D12Enc,
|
||||
const struct pipe_enc_two_pass_encoder_config& two_pass)
|
||||
{
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
|
||||
pD3D12Enc->m_currentEncodeConfig.m_TwoPassEncodeDesc = {};
|
||||
|
||||
|
|
@ -517,11 +502,8 @@ void d3d12_video_encoder_initialize_two_pass(struct d3d12_video_encoder *pD3D12E
|
|||
pD3D12Enc->m_currentEncodeConfig.m_TwoPassEncodeDesc.Pow2DownscaleFactor = two_pass.pow2_downscale_factor;
|
||||
pD3D12Enc->m_currentEncodeConfig.m_TwoPassEncodeDesc.bUseExternalDPBScaling = two_pass.skip_1st_dpb_texture;
|
||||
}
|
||||
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
}
|
||||
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
static
|
||||
struct pipe_enc_two_pass_frame_config
|
||||
d3d12_video_encoder_get_two_pass_config_from_picparams(struct pipe_picture_desc* picture,
|
||||
|
|
@ -546,9 +528,8 @@ d3d12_video_encoder_get_two_pass_config_from_picparams(struct pipe_picture_desc*
|
|||
UNREACHABLE("Unsupported pipe_video_format");
|
||||
} break;
|
||||
}
|
||||
return twopass_frame_config;
|
||||
return twopass_frame_config;
|
||||
}
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
|
||||
/*
|
||||
* Caller once per frame to update the frame specific two pass settings
|
||||
|
|
@ -563,7 +544,6 @@ d3d12_video_encoder_update_two_pass_frame_settings(struct d3d12_video_encoder *p
|
|||
enum pipe_video_format codec,
|
||||
struct pipe_picture_desc* picture)
|
||||
{
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
if (pD3D12Enc->m_currentEncodeConfig.m_TwoPassEncodeDesc.AppRequested)
|
||||
{
|
||||
struct pipe_enc_two_pass_frame_config two_pass_frame_cfg = d3d12_video_encoder_get_two_pass_config_from_picparams(picture, codec);
|
||||
|
|
@ -670,15 +650,13 @@ d3d12_video_encoder_update_two_pass_frame_settings(struct d3d12_video_encoder *p
|
|||
}
|
||||
}
|
||||
}
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
}
|
||||
|
||||
void
|
||||
d3d12_video_encoder_update_dirty_rects(struct d3d12_video_encoder *pD3D12Enc,
|
||||
const struct pipe_enc_dirty_info& rects)
|
||||
{
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
memset(&pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc, 0, sizeof(pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc));
|
||||
pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc = {};
|
||||
|
||||
pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.MapSource = rects.input_mode == PIPE_ENC_DIRTY_INFO_INPUT_MODE_RECTS ?
|
||||
D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_CPU_BUFFER : D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE;
|
||||
|
|
@ -719,7 +697,6 @@ d3d12_video_encoder_update_dirty_rects(struct d3d12_video_encoder *pD3D12Enc,
|
|||
assert(pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.MapInfo.FullFrameIdentical ||
|
||||
pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.MapInfo.InputMap);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void
|
||||
|
|
@ -727,13 +704,8 @@ d3d12_video_encoder_update_picparams_tracking(struct d3d12_video_encoder *pD3D12
|
|||
struct pipe_video_buffer * srcTexture,
|
||||
struct pipe_picture_desc * picture)
|
||||
{
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 currentPicParams =
|
||||
d3d12_video_encoder_get_current_picture_param_settings1(pD3D12Enc);
|
||||
#else
|
||||
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA currentPicParams =
|
||||
d3d12_video_encoder_get_current_picture_param_settings(pD3D12Enc);
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
|
||||
enum pipe_video_format codec = u_reduce_video_profile(pD3D12Enc->base.profile);
|
||||
bool bUsedAsReference = false;
|
||||
|
|
@ -957,9 +929,7 @@ d3d12_video_encoder_reconfigure_encoder_objects(struct d3d12_video_encoder *pD3D
|
|||
0 /*checking the flag is NOT set*/))
|
||||
// || motionPrecisionLimitChanged // Only affects encoder
|
||||
// Re-create encoder heap if dirty regions changes and the current heap doesn't already support them
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
|| dirtyRegionsChanged && ((pD3D12Enc->m_spVideoEncoderHeap->GetEncoderHeapFlags() & D3D12_VIDEO_ENCODER_HEAP_FLAG_ALLOW_DIRTY_REGIONS) == 0)
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
|| (dirtyRegionsChanged && ((pD3D12Enc->m_spVideoEncoderHeap->GetEncoderHeapFlags() & D3D12_VIDEO_ENCODER_HEAP_FLAG_ALLOW_DIRTY_REGIONS) == 0))
|
||||
) {
|
||||
if (!pD3D12Enc->m_spVideoEncoderHeap) {
|
||||
debug_printf("[d3d12_video_encoder] d3d12_video_encoder_reconfigure_encoder_objects - Creating "
|
||||
|
|
@ -970,7 +940,6 @@ d3d12_video_encoder_reconfigure_encoder_objects(struct d3d12_video_encoder *pD3D
|
|||
}
|
||||
|
||||
HRESULT hr = S_OK;
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
ComPtr<ID3D12VideoDevice4> spVideoDevice4;
|
||||
if (SUCCEEDED(pD3D12Enc->m_spD3D12VideoDevice->QueryInterface(
|
||||
IID_PPV_ARGS(spVideoDevice4.GetAddressOf()))))
|
||||
|
|
@ -1026,7 +995,6 @@ d3d12_video_encoder_reconfigure_encoder_objects(struct d3d12_video_encoder *pD3D
|
|||
}
|
||||
}
|
||||
else
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
{
|
||||
D3D12_VIDEO_ENCODER_HEAP_DESC heapDesc = { pD3D12Enc->m_NodeMask,
|
||||
D3D12_VIDEO_ENCODER_HEAP_FLAG_NONE,
|
||||
|
|
@ -1147,12 +1115,10 @@ d3d12_video_encoder_get_current_slice_param_settings(struct d3d12_video_encoder
|
|||
return subregionData;
|
||||
}
|
||||
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
if (pD3D12Enc->m_currentEncodeConfig.m_encoderSliceConfigMode ==
|
||||
D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_AUTO) {
|
||||
return subregionData;
|
||||
}
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
|
||||
enum pipe_video_format codec = u_reduce_video_profile(pD3D12Enc->base.profile);
|
||||
switch (codec) {
|
||||
|
|
@ -1189,12 +1155,12 @@ d3d12_video_encoder_get_current_slice_param_settings(struct d3d12_video_encoder
|
|||
return subregionData;
|
||||
}
|
||||
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1
|
||||
d3d12_video_encoder_get_current_picture_param_settings1(struct d3d12_video_encoder *pD3D12Enc)
|
||||
|
||||
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA
|
||||
d3d12_video_encoder_get_current_picture_param_settings_legacy(struct d3d12_video_encoder *pD3D12Enc)
|
||||
{
|
||||
enum pipe_video_format codec = u_reduce_video_profile(pD3D12Enc->base.profile);
|
||||
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curPicParamsData = {};
|
||||
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curPicParamsData = {};
|
||||
switch (codec) {
|
||||
#if VIDEO_CODEC_H264ENC
|
||||
case PIPE_VIDEO_FORMAT_MPEG4_AVC:
|
||||
|
|
@ -1206,8 +1172,9 @@ d3d12_video_encoder_get_current_picture_param_settings1(struct d3d12_video_encod
|
|||
#if VIDEO_CODEC_H265ENC
|
||||
case PIPE_VIDEO_FORMAT_HEVC:
|
||||
{
|
||||
curPicParamsData.pHEVCPicData = &pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_HEVCPicData;
|
||||
curPicParamsData.DataSize = sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2);
|
||||
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 is binary compatible with D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC
|
||||
curPicParamsData.pHEVCPicData = reinterpret_cast<D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC*>(&pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_HEVCPicData);
|
||||
curPicParamsData.DataSize = sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC);
|
||||
} break;
|
||||
#endif
|
||||
#if VIDEO_CODEC_AV1ENC
|
||||
|
|
@ -1224,13 +1191,12 @@ d3d12_video_encoder_get_current_picture_param_settings1(struct d3d12_video_encod
|
|||
}
|
||||
return curPicParamsData;
|
||||
}
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
|
||||
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA
|
||||
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1
|
||||
d3d12_video_encoder_get_current_picture_param_settings(struct d3d12_video_encoder *pD3D12Enc)
|
||||
{
|
||||
enum pipe_video_format codec = u_reduce_video_profile(pD3D12Enc->base.profile);
|
||||
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curPicParamsData = {};
|
||||
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curPicParamsData = {};
|
||||
switch (codec) {
|
||||
#if VIDEO_CODEC_H264ENC
|
||||
case PIPE_VIDEO_FORMAT_MPEG4_AVC:
|
||||
|
|
@ -1242,9 +1208,8 @@ d3d12_video_encoder_get_current_picture_param_settings(struct d3d12_video_encode
|
|||
#if VIDEO_CODEC_H265ENC
|
||||
case PIPE_VIDEO_FORMAT_HEVC:
|
||||
{
|
||||
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 binary-compatible with D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC
|
||||
curPicParamsData.pHEVCPicData = reinterpret_cast<D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC*>(&pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_HEVCPicData);
|
||||
curPicParamsData.DataSize = sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC);
|
||||
curPicParamsData.pHEVCPicData = &pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_HEVCPicData;
|
||||
curPicParamsData.DataSize = sizeof(pD3D12Enc->m_currentEncodeConfig.m_encoderPicParamsDesc.m_HEVCPicData);
|
||||
} break;
|
||||
#endif
|
||||
#if VIDEO_CODEC_AV1ENC
|
||||
|
|
@ -1689,7 +1654,6 @@ d3d12_video_encoder_disable_rc_minmaxqp(struct D3D12EncodeRateControlState & rcS
|
|||
}
|
||||
}
|
||||
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
static bool d3d12_video_encoder_is_move_regions_feature_enabled(struct d3d12_video_encoder* pD3D12Enc, D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE mapSource)
|
||||
{
|
||||
if (pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapSource != mapSource)
|
||||
|
|
@ -1707,9 +1671,7 @@ static bool d3d12_video_encoder_is_move_regions_feature_enabled(struct d3d12_vid
|
|||
}
|
||||
return false;
|
||||
}
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
static bool d3d12_video_encoder_is_dirty_regions_feature_enabled(struct d3d12_video_encoder* pD3D12Enc, D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE mapSource)
|
||||
{
|
||||
if (pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.MapSource != mapSource)
|
||||
|
|
@ -1729,7 +1691,6 @@ static bool d3d12_video_encoder_is_dirty_regions_feature_enabled(struct d3d12_vi
|
|||
}
|
||||
return false;
|
||||
}
|
||||
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
|
||||
static void
|
||||
d3d12_video_encoder_disable_rc_extended1_to_legacy(struct D3D12EncodeRateControlState & rcState)
|
||||
|
|
@ -1747,20 +1708,16 @@ d3d12_video_encoder_disable_rc_extended1_to_legacy(struct D3D12EncodeRateControl
|
|||
/// Note that with fallbacks, the upper layer will not get exactly the encoding seetings they requested
|
||||
/// but for very particular settings it's better to continue with warnings than failing the whole encoding process
|
||||
///
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(struct d3d12_video_encoder *pD3D12Enc, D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 &capEncoderSupportData1) {
|
||||
#else
|
||||
bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(struct d3d12_video_encoder *pD3D12Enc, D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 &capEncoderSupportData1) {
|
||||
#endif
|
||||
bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(struct d3d12_video_encoder *pD3D12Enc, D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 &capEncoderSupportData) {
|
||||
|
||||
///
|
||||
/// Check for general support
|
||||
/// Check for validation errors (some drivers return general support but also validation errors anyways, work around for those unexpected cases)
|
||||
///
|
||||
|
||||
bool configSupported = d3d12_video_encoder_query_d3d12_driver_caps(pD3D12Enc, /*inout*/ capEncoderSupportData1)
|
||||
&& (((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_GENERAL_SUPPORT_OK) != 0)
|
||||
&& (capEncoderSupportData1.ValidationFlags == D3D12_VIDEO_ENCODER_VALIDATION_FLAG_NONE));
|
||||
bool configSupported = d3d12_video_encoder_query_d3d12_driver_caps(pD3D12Enc, /*inout*/ capEncoderSupportData)
|
||||
&& (((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_GENERAL_SUPPORT_OK) != 0)
|
||||
&& (capEncoderSupportData.ValidationFlags == D3D12_VIDEO_ENCODER_VALIDATION_FLAG_NONE));
|
||||
|
||||
///
|
||||
/// If D3D12_FEATURE_VIDEO_ENCODER_SUPPORT is not supported, try falling back to unsetting optional features and check for caps again
|
||||
|
|
@ -1769,7 +1726,7 @@ bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(stru
|
|||
if (!configSupported) {
|
||||
debug_printf("[d3d12_video_encoder] WARNING: D3D12_FEATURE_VIDEO_ENCODER_SUPPORT is not supported, trying fallback to unsetting optional features\n");
|
||||
|
||||
bool isRequestingVBVSizesSupported = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_VBV_SIZE_CONFIG_AVAILABLE) != 0);
|
||||
bool isRequestingVBVSizesSupported = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_VBV_SIZE_CONFIG_AVAILABLE) != 0);
|
||||
bool isClientRequestingVBVSizes = ((pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex].m_Flags & D3D12_VIDEO_ENCODER_RATE_CONTROL_FLAG_ENABLE_VBV_SIZES) != 0);
|
||||
|
||||
if(isClientRequestingVBVSizes && !isRequestingVBVSizesSupported) {
|
||||
|
|
@ -1777,7 +1734,7 @@ bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(stru
|
|||
d3d12_video_encoder_disable_rc_vbv_sizes(pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex]);
|
||||
}
|
||||
|
||||
bool isRequestingPeakFrameSizeSupported = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_MAX_FRAME_SIZE_AVAILABLE) != 0);
|
||||
bool isRequestingPeakFrameSizeSupported = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_MAX_FRAME_SIZE_AVAILABLE) != 0);
|
||||
bool isClientRequestingPeakFrameSize = ((pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex].m_Flags & D3D12_VIDEO_ENCODER_RATE_CONTROL_FLAG_ENABLE_MAX_FRAME_SIZE) != 0);
|
||||
|
||||
if(isClientRequestingPeakFrameSize && !isRequestingPeakFrameSizeSupported) {
|
||||
|
|
@ -1785,7 +1742,7 @@ bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(stru
|
|||
d3d12_video_encoder_disable_rc_maxframesize(pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex]);
|
||||
}
|
||||
|
||||
bool isRequestingQPRangesSupported = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_ADJUSTABLE_QP_RANGE_AVAILABLE) != 0);
|
||||
bool isRequestingQPRangesSupported = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_ADJUSTABLE_QP_RANGE_AVAILABLE) != 0);
|
||||
bool isClientRequestingQPRanges = ((pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex].m_Flags & D3D12_VIDEO_ENCODER_RATE_CONTROL_FLAG_ENABLE_QP_RANGE) != 0);
|
||||
|
||||
if(isClientRequestingQPRanges && !isRequestingQPRangesSupported) {
|
||||
|
|
@ -1793,7 +1750,7 @@ bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(stru
|
|||
d3d12_video_encoder_disable_rc_minmaxqp(pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex]);
|
||||
}
|
||||
|
||||
bool isRequestingDeltaQPSupported = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_DELTA_QP_AVAILABLE) != 0);
|
||||
bool isRequestingDeltaQPSupported = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_DELTA_QP_AVAILABLE) != 0);
|
||||
bool isClientRequestingDeltaQP = ((pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex].m_Flags & D3D12_VIDEO_ENCODER_RATE_CONTROL_FLAG_ENABLE_DELTA_QP) != 0);
|
||||
|
||||
if(isClientRequestingDeltaQP && !isRequestingDeltaQPSupported) {
|
||||
|
|
@ -1801,7 +1758,7 @@ bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(stru
|
|||
d3d12_video_encoder_disable_rc_deltaqp(pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex]);
|
||||
}
|
||||
|
||||
bool isRequestingExtended1RCSupported = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_EXTENSION1_SUPPORT) != 0);
|
||||
bool isRequestingExtended1RCSupported = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_EXTENSION1_SUPPORT) != 0);
|
||||
bool isClientRequestingExtended1RC = ((pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex].m_Flags & D3D12_VIDEO_ENCODER_RATE_CONTROL_FLAG_ENABLE_EXTENSION1_SUPPORT) != 0);
|
||||
|
||||
if(isClientRequestingExtended1RC && !isRequestingExtended1RCSupported) {
|
||||
|
|
@ -1812,7 +1769,7 @@ bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(stru
|
|||
/* d3d12_video_encoder_disable_rc_extended1_to_legacy may change m_Flags */
|
||||
if ((pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex].m_Flags & D3D12_VIDEO_ENCODER_RATE_CONTROL_FLAG_ENABLE_EXTENSION1_SUPPORT) != 0)
|
||||
{ // Quality levels also requires D3D12_VIDEO_ENCODER_RATE_CONTROL_FLAG_ENABLE_EXTENSION1_SUPPORT
|
||||
bool isRequestingQualityLevelsSupported = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_QUALITY_VS_SPEED_AVAILABLE) != 0);
|
||||
bool isRequestingQualityLevelsSupported = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_QUALITY_VS_SPEED_AVAILABLE) != 0);
|
||||
bool isClientRequestingQualityLevels = ((pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex].m_Flags & D3D12_VIDEO_ENCODER_RATE_CONTROL_FLAG_ENABLE_QUALITY_VS_SPEED) != 0);
|
||||
|
||||
if (isClientRequestingQualityLevels)
|
||||
|
|
@ -1820,7 +1777,7 @@ bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(stru
|
|||
if (!isRequestingQualityLevelsSupported) {
|
||||
debug_printf("[d3d12_video_encoder] WARNING: Requested D3D12_VIDEO_ENCODER_RATE_CONTROL_FLAG_ENABLE_QUALITY_VS_SPEED but the feature is not supported, will continue encoding unsetting this feature as fallback.\n");
|
||||
d3d12_video_encoder_disable_rc_qualitylevels(pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex]);
|
||||
} else if (!d3d12_video_encoder_is_qualitylevel_in_range(pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex], capEncoderSupportData1.MaxQualityVsSpeed)) {
|
||||
} else if (!d3d12_video_encoder_is_qualitylevel_in_range(pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex], capEncoderSupportData.MaxQualityVsSpeed)) {
|
||||
debug_printf("[d3d12_video_encoder] WARNING: Requested D3D12_VIDEO_ENCODER_RATE_CONTROL_FLAG_ENABLE_QUALITY_VS_SPEED but the value is out of supported range, will continue encoding unsetting this feature as fallback.\n");
|
||||
d3d12_video_encoder_disable_rc_qualitylevels(pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex]);
|
||||
}
|
||||
|
|
@ -1828,7 +1785,7 @@ bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(stru
|
|||
}
|
||||
|
||||
/* Try fallback for multi-slice/tile not supported with single subregion mode */
|
||||
if ((capEncoderSupportData1.ValidationFlags & D3D12_VIDEO_ENCODER_VALIDATION_FLAG_SUBREGION_LAYOUT_MODE_NOT_SUPPORTED) != 0) {
|
||||
if ((capEncoderSupportData.ValidationFlags & D3D12_VIDEO_ENCODER_VALIDATION_FLAG_SUBREGION_LAYOUT_MODE_NOT_SUPPORTED) != 0) {
|
||||
pD3D12Enc->m_currentEncodeConfig.m_encoderSliceConfigMode = D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_FULL_FRAME;
|
||||
debug_printf("[d3d12_video_encoder] WARNING: Requested slice/tile mode not supported by driver, will continue encoding with single subregion encoding.\n");
|
||||
}
|
||||
|
|
@ -1836,9 +1793,9 @@ bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(stru
|
|||
///
|
||||
/// Try fallback configuration
|
||||
///
|
||||
configSupported = d3d12_video_encoder_query_d3d12_driver_caps(pD3D12Enc, /*inout*/ capEncoderSupportData1)
|
||||
&& (((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_GENERAL_SUPPORT_OK) != 0)
|
||||
&& (capEncoderSupportData1.ValidationFlags == D3D12_VIDEO_ENCODER_VALIDATION_FLAG_NONE));
|
||||
configSupported = d3d12_video_encoder_query_d3d12_driver_caps(pD3D12Enc, /*inout*/ capEncoderSupportData)
|
||||
&& (((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_GENERAL_SUPPORT_OK) != 0)
|
||||
&& (capEncoderSupportData.ValidationFlags == D3D12_VIDEO_ENCODER_VALIDATION_FLAG_NONE));
|
||||
}
|
||||
|
||||
if (pD3D12Enc->m_currentEncodeConfig.m_IntraRefresh.IntraRefreshDuration >
|
||||
|
|
@ -1848,64 +1805,60 @@ bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(stru
|
|||
"reported IR duration %d in query caps) for current resolution.\n",
|
||||
pD3D12Enc->m_currentEncodeConfig.m_IntraRefresh.IntraRefreshDuration,
|
||||
pD3D12Enc->m_currentEncodeCapabilities.m_currentResolutionSupportCaps.MaxIntraRefreshFrameDuration);
|
||||
capEncoderSupportData1.ValidationFlags |= D3D12_VIDEO_ENCODER_VALIDATION_FLAG_INTRA_REFRESH_MODE_NOT_SUPPORTED;
|
||||
capEncoderSupportData.ValidationFlags |= D3D12_VIDEO_ENCODER_VALIDATION_FLAG_INTRA_REFRESH_MODE_NOT_SUPPORTED;
|
||||
configSupported = false;
|
||||
}
|
||||
|
||||
if(!configSupported) {
|
||||
debug_printf("[d3d12_video_encoder] Cap negotiation failed, see more details below:\n");
|
||||
|
||||
if ((capEncoderSupportData1.ValidationFlags & D3D12_VIDEO_ENCODER_VALIDATION_FLAG_CODEC_NOT_SUPPORTED) != 0) {
|
||||
if ((capEncoderSupportData.ValidationFlags & D3D12_VIDEO_ENCODER_VALIDATION_FLAG_CODEC_NOT_SUPPORTED) != 0) {
|
||||
debug_printf("[d3d12_video_encoder] Requested codec is not supported\n");
|
||||
}
|
||||
|
||||
if ((capEncoderSupportData1.ValidationFlags &
|
||||
if ((capEncoderSupportData.ValidationFlags &
|
||||
D3D12_VIDEO_ENCODER_VALIDATION_FLAG_RESOLUTION_NOT_SUPPORTED_IN_LIST) != 0) {
|
||||
debug_printf("[d3d12_video_encoder] Requested resolution is not supported\n");
|
||||
}
|
||||
|
||||
if ((capEncoderSupportData1.ValidationFlags &
|
||||
if ((capEncoderSupportData.ValidationFlags &
|
||||
D3D12_VIDEO_ENCODER_VALIDATION_FLAG_RATE_CONTROL_CONFIGURATION_NOT_SUPPORTED) != 0) {
|
||||
debug_printf("[d3d12_video_encoder] Requested bitrate or rc config is not supported\n");
|
||||
}
|
||||
|
||||
if ((capEncoderSupportData1.ValidationFlags &
|
||||
if ((capEncoderSupportData.ValidationFlags &
|
||||
D3D12_VIDEO_ENCODER_VALIDATION_FLAG_CODEC_CONFIGURATION_NOT_SUPPORTED) != 0) {
|
||||
debug_printf("[d3d12_video_encoder] Requested codec config is not supported\n");
|
||||
}
|
||||
|
||||
if ((capEncoderSupportData1.ValidationFlags &
|
||||
if ((capEncoderSupportData.ValidationFlags &
|
||||
D3D12_VIDEO_ENCODER_VALIDATION_FLAG_RATE_CONTROL_MODE_NOT_SUPPORTED) != 0) {
|
||||
debug_printf("[d3d12_video_encoder] Requested rate control mode is not supported\n");
|
||||
}
|
||||
|
||||
if ((capEncoderSupportData1.ValidationFlags &
|
||||
if ((capEncoderSupportData.ValidationFlags &
|
||||
D3D12_VIDEO_ENCODER_VALIDATION_FLAG_INTRA_REFRESH_MODE_NOT_SUPPORTED) != 0) {
|
||||
debug_printf("[d3d12_video_encoder] Requested intra refresh config is not supported\n");
|
||||
}
|
||||
|
||||
if ((capEncoderSupportData1.ValidationFlags &
|
||||
if ((capEncoderSupportData.ValidationFlags &
|
||||
D3D12_VIDEO_ENCODER_VALIDATION_FLAG_SUBREGION_LAYOUT_MODE_NOT_SUPPORTED) != 0) {
|
||||
debug_printf("[d3d12_video_encoder] Requested subregion layout mode is not supported\n");
|
||||
}
|
||||
|
||||
if ((capEncoderSupportData1.ValidationFlags & D3D12_VIDEO_ENCODER_VALIDATION_FLAG_INPUT_FORMAT_NOT_SUPPORTED) !=
|
||||
if ((capEncoderSupportData.ValidationFlags & D3D12_VIDEO_ENCODER_VALIDATION_FLAG_INPUT_FORMAT_NOT_SUPPORTED) !=
|
||||
0) {
|
||||
debug_printf("[d3d12_video_encoder] Requested input dxgi format is not supported\n");
|
||||
}
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
if ((capEncoderSupportData1.ValidationFlags & D3D12_VIDEO_ENCODER_VALIDATION_FLAG_DIRTY_REGIONS_NOT_SUPPORTED ) !=
|
||||
if ((capEncoderSupportData.ValidationFlags & D3D12_VIDEO_ENCODER_VALIDATION_FLAG_DIRTY_REGIONS_NOT_SUPPORTED ) !=
|
||||
0) {
|
||||
debug_printf("[d3d12_video_encoder] Requested input dirty regions is not supported\n");
|
||||
}
|
||||
|
||||
if ((capEncoderSupportData1.ValidationFlags & D3D12_VIDEO_ENCODER_VALIDATION_FLAG_FRAME_ANALYSIS_NOT_SUPPORTED ) !=
|
||||
if ((capEncoderSupportData.ValidationFlags & D3D12_VIDEO_ENCODER_VALIDATION_FLAG_FRAME_ANALYSIS_NOT_SUPPORTED ) !=
|
||||
0) {
|
||||
debug_printf("[d3d12_video_encoder] Requested two pass encode is not supported\n");
|
||||
}
|
||||
#else
|
||||
|
||||
#endif
|
||||
}
|
||||
|
||||
if (!pD3D12Enc->m_prevFrameEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex]
|
||||
|
|
@ -1916,91 +1869,84 @@ bool d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(stru
|
|||
return configSupported;
|
||||
}
|
||||
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3D12Enc, D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 &capEncoderSupportData1) {
|
||||
#else
|
||||
bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3D12Enc, D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 &capEncoderSupportData1) {
|
||||
#endif
|
||||
capEncoderSupportData1.NodeIndex = pD3D12Enc->m_NodeIndex;
|
||||
capEncoderSupportData1.Codec = d3d12_video_encoder_get_current_codec(pD3D12Enc);
|
||||
capEncoderSupportData1.InputFormat = pD3D12Enc->m_currentEncodeConfig.m_encodeFormatInfo.Format;
|
||||
capEncoderSupportData1.RateControl = d3d12_video_encoder_get_current_rate_control_settings(pD3D12Enc);
|
||||
capEncoderSupportData1.IntraRefresh = pD3D12Enc->m_currentEncodeConfig.m_IntraRefresh.Mode;
|
||||
capEncoderSupportData1.SubregionFrameEncoding = pD3D12Enc->m_currentEncodeConfig.m_encoderSliceConfigMode;
|
||||
capEncoderSupportData1.ResolutionsListCount = 1;
|
||||
capEncoderSupportData1.pResolutionList = &pD3D12Enc->m_currentEncodeConfig.m_currentResolution;
|
||||
capEncoderSupportData1.CodecGopSequence = d3d12_video_encoder_get_current_gop_desc(pD3D12Enc);
|
||||
capEncoderSupportData1.MaxReferenceFramesInDPB =
|
||||
bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3D12Enc, D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 &capEncoderSupportData) {
|
||||
capEncoderSupportData.NodeIndex = pD3D12Enc->m_NodeIndex;
|
||||
capEncoderSupportData.Codec = d3d12_video_encoder_get_current_codec(pD3D12Enc);
|
||||
capEncoderSupportData.InputFormat = pD3D12Enc->m_currentEncodeConfig.m_encodeFormatInfo.Format;
|
||||
capEncoderSupportData.RateControl = d3d12_video_encoder_get_current_rate_control_settings(pD3D12Enc);
|
||||
capEncoderSupportData.IntraRefresh = pD3D12Enc->m_currentEncodeConfig.m_IntraRefresh.Mode;
|
||||
capEncoderSupportData.SubregionFrameEncoding = pD3D12Enc->m_currentEncodeConfig.m_encoderSliceConfigMode;
|
||||
capEncoderSupportData.ResolutionsListCount = 1;
|
||||
capEncoderSupportData.pResolutionList = &pD3D12Enc->m_currentEncodeConfig.m_currentResolution;
|
||||
capEncoderSupportData.CodecGopSequence = d3d12_video_encoder_get_current_gop_desc(pD3D12Enc);
|
||||
capEncoderSupportData.MaxReferenceFramesInDPB =
|
||||
std::max(2u, d3d12_video_encoder_get_current_max_dpb_capacity(pD3D12Enc)) - 1u; // we only want the number of references (not the current pic slot too)
|
||||
capEncoderSupportData1.CodecConfiguration = d3d12_video_encoder_get_current_codec_config_desc(pD3D12Enc);
|
||||
capEncoderSupportData.CodecConfiguration = d3d12_video_encoder_get_current_codec_config_desc(pD3D12Enc);
|
||||
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
// Set dirty regions input info to cap
|
||||
capEncoderSupportData1.DirtyRegions.MapSource = pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.MapSource;
|
||||
capEncoderSupportData1.DirtyRegions.Enabled = d3d12_video_encoder_is_dirty_regions_feature_enabled(pD3D12Enc, pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.MapSource);
|
||||
if (capEncoderSupportData1.DirtyRegions.Enabled)
|
||||
capEncoderSupportData.DirtyRegions.MapSource = pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.MapSource;
|
||||
capEncoderSupportData.DirtyRegions.Enabled = d3d12_video_encoder_is_dirty_regions_feature_enabled(pD3D12Enc, pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.MapSource);
|
||||
if (capEncoderSupportData.DirtyRegions.Enabled)
|
||||
{
|
||||
capEncoderSupportData1.DirtyRegions.MapValuesType = (capEncoderSupportData1.DirtyRegions.MapSource == D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_CPU_BUFFER) ?
|
||||
capEncoderSupportData.DirtyRegions.MapValuesType = (capEncoderSupportData.DirtyRegions.MapSource == D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_CPU_BUFFER) ?
|
||||
pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.RectsInfo.MapValuesType :
|
||||
pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.MapInfo.MapValuesType;
|
||||
}
|
||||
|
||||
d3d12_video_encoder_is_gpu_qmap_input_feature_enabled(pD3D12Enc, /*output param*/ capEncoderSupportData1.QPMap.Enabled, /*output param*/ capEncoderSupportData1.QPMap.MapSource);
|
||||
d3d12_video_encoder_is_gpu_qmap_input_feature_enabled(pD3D12Enc, /*output param*/ capEncoderSupportData.QPMap.Enabled, /*output param*/ capEncoderSupportData.QPMap.MapSource);
|
||||
|
||||
capEncoderSupportData1.MotionSearch.MapSource = pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapSource;
|
||||
capEncoderSupportData1.MotionSearch.Enabled = d3d12_video_encoder_is_move_regions_feature_enabled(pD3D12Enc, pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapSource);
|
||||
if (capEncoderSupportData1.MotionSearch.Enabled)
|
||||
capEncoderSupportData.MotionSearch.MapSource = pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapSource;
|
||||
capEncoderSupportData.MotionSearch.Enabled = d3d12_video_encoder_is_move_regions_feature_enabled(pD3D12Enc, pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapSource);
|
||||
if (capEncoderSupportData.MotionSearch.Enabled)
|
||||
{
|
||||
capEncoderSupportData1.MotionSearch.MotionSearchMode = D3D12_VIDEO_ENCODER_FRAME_MOTION_SEARCH_MODE_FULL_SEARCH;
|
||||
capEncoderSupportData1.MotionSearch.BidirectionalRefFrameEnabled = TRUE;
|
||||
capEncoderSupportData.MotionSearch.MotionSearchMode = D3D12_VIDEO_ENCODER_FRAME_MOTION_SEARCH_MODE_FULL_SEARCH;
|
||||
capEncoderSupportData.MotionSearch.BidirectionalRefFrameEnabled = TRUE;
|
||||
}
|
||||
|
||||
capEncoderSupportData1.FrameAnalysis.Enabled = pD3D12Enc->m_currentEncodeConfig.m_TwoPassEncodeDesc.AppRequested;
|
||||
if (capEncoderSupportData1.FrameAnalysis.Enabled)
|
||||
capEncoderSupportData.FrameAnalysis.Enabled = pD3D12Enc->m_currentEncodeConfig.m_TwoPassEncodeDesc.AppRequested;
|
||||
if (capEncoderSupportData.FrameAnalysis.Enabled)
|
||||
{
|
||||
capEncoderSupportData1.FrameAnalysis.Pow2DownscaleFactor = pD3D12Enc->m_currentEncodeConfig.m_TwoPassEncodeDesc.Pow2DownscaleFactor;
|
||||
capEncoderSupportData.FrameAnalysis.Pow2DownscaleFactor = pD3D12Enc->m_currentEncodeConfig.m_TwoPassEncodeDesc.Pow2DownscaleFactor;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
enum pipe_video_format codec = u_reduce_video_profile(pD3D12Enc->base.profile);
|
||||
switch (codec) {
|
||||
#if VIDEO_CODEC_H264ENC
|
||||
case PIPE_VIDEO_FORMAT_MPEG4_AVC:
|
||||
{
|
||||
capEncoderSupportData1.SuggestedProfile.pH264Profile =
|
||||
capEncoderSupportData.SuggestedProfile.pH264Profile =
|
||||
&pD3D12Enc->m_currentEncodeCapabilities.m_encoderSuggestedProfileDesc.m_H264Profile;
|
||||
capEncoderSupportData1.SuggestedProfile.DataSize =
|
||||
capEncoderSupportData.SuggestedProfile.DataSize =
|
||||
sizeof(pD3D12Enc->m_currentEncodeCapabilities.m_encoderSuggestedProfileDesc.m_H264Profile);
|
||||
capEncoderSupportData1.SuggestedLevel.pH264LevelSetting =
|
||||
capEncoderSupportData.SuggestedLevel.pH264LevelSetting =
|
||||
&pD3D12Enc->m_currentEncodeCapabilities.m_encoderLevelSuggestedDesc.m_H264LevelSetting;
|
||||
capEncoderSupportData1.SuggestedLevel.DataSize =
|
||||
capEncoderSupportData.SuggestedLevel.DataSize =
|
||||
sizeof(pD3D12Enc->m_currentEncodeCapabilities.m_encoderLevelSuggestedDesc.m_H264LevelSetting);
|
||||
} break;
|
||||
#endif
|
||||
#if VIDEO_CODEC_H265ENC
|
||||
case PIPE_VIDEO_FORMAT_HEVC:
|
||||
{
|
||||
capEncoderSupportData1.SuggestedProfile.pHEVCProfile =
|
||||
capEncoderSupportData.SuggestedProfile.pHEVCProfile =
|
||||
&pD3D12Enc->m_currentEncodeCapabilities.m_encoderSuggestedProfileDesc.m_HEVCProfile;
|
||||
capEncoderSupportData1.SuggestedProfile.DataSize =
|
||||
capEncoderSupportData.SuggestedProfile.DataSize =
|
||||
sizeof(pD3D12Enc->m_currentEncodeCapabilities.m_encoderSuggestedProfileDesc.m_HEVCProfile);
|
||||
capEncoderSupportData1.SuggestedLevel.pHEVCLevelSetting =
|
||||
capEncoderSupportData.SuggestedLevel.pHEVCLevelSetting =
|
||||
&pD3D12Enc->m_currentEncodeCapabilities.m_encoderLevelSuggestedDesc.m_HEVCLevelSetting;
|
||||
capEncoderSupportData1.SuggestedLevel.DataSize =
|
||||
capEncoderSupportData.SuggestedLevel.DataSize =
|
||||
sizeof(pD3D12Enc->m_currentEncodeCapabilities.m_encoderLevelSuggestedDesc.m_HEVCLevelSetting);
|
||||
} break;
|
||||
#endif
|
||||
#if VIDEO_CODEC_AV1ENC
|
||||
case PIPE_VIDEO_FORMAT_AV1:
|
||||
{
|
||||
capEncoderSupportData1.SuggestedProfile.pAV1Profile =
|
||||
capEncoderSupportData.SuggestedProfile.pAV1Profile =
|
||||
&pD3D12Enc->m_currentEncodeCapabilities.m_encoderSuggestedProfileDesc.m_AV1Profile;
|
||||
capEncoderSupportData1.SuggestedProfile.DataSize =
|
||||
capEncoderSupportData.SuggestedProfile.DataSize =
|
||||
sizeof(pD3D12Enc->m_currentEncodeCapabilities.m_encoderSuggestedProfileDesc.m_AV1Profile);
|
||||
capEncoderSupportData1.SuggestedLevel.pAV1LevelSetting =
|
||||
capEncoderSupportData.SuggestedLevel.pAV1LevelSetting =
|
||||
&pD3D12Enc->m_currentEncodeCapabilities.m_encoderLevelSuggestedDesc.m_AV1LevelSetting;
|
||||
capEncoderSupportData1.SuggestedLevel.DataSize =
|
||||
capEncoderSupportData.SuggestedLevel.DataSize =
|
||||
sizeof(pD3D12Enc->m_currentEncodeCapabilities.m_encoderLevelSuggestedDesc.m_AV1LevelSetting);
|
||||
} break;
|
||||
#endif
|
||||
|
|
@ -2011,14 +1957,13 @@ bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3
|
|||
}
|
||||
|
||||
// prepare inout storage for the resolution dependent result.
|
||||
capEncoderSupportData1.pResolutionDependentSupport =
|
||||
capEncoderSupportData.pResolutionDependentSupport =
|
||||
&pD3D12Enc->m_currentEncodeCapabilities.m_currentResolutionSupportCaps;
|
||||
|
||||
capEncoderSupportData1.SubregionFrameEncodingData = d3d12_video_encoder_get_current_slice_param_settings(pD3D12Enc);
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
capEncoderSupportData.SubregionFrameEncodingData = d3d12_video_encoder_get_current_slice_param_settings(pD3D12Enc);
|
||||
HRESULT hr = pD3D12Enc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT2,
|
||||
&capEncoderSupportData1,
|
||||
sizeof(capEncoderSupportData1));
|
||||
&capEncoderSupportData,
|
||||
sizeof(capEncoderSupportData));
|
||||
|
||||
if (FAILED(hr)) {
|
||||
debug_printf("CheckFeatureSupport D3D12_FEATURE_VIDEO_ENCODER_SUPPORT2 failed with HR %x\n", (unsigned)hr);
|
||||
|
|
@ -2026,25 +1971,19 @@ bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3
|
|||
|
||||
// D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 extends D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1
|
||||
// in a binary compatible way, so just cast it and try with the older query D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1
|
||||
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 * casted_down_cap_data = reinterpret_cast<D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1*>(&capEncoderSupportData1);
|
||||
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 * casted_down_cap_data = reinterpret_cast<D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1*>(&capEncoderSupportData);
|
||||
hr = pD3D12Enc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1,
|
||||
casted_down_cap_data,
|
||||
sizeof(D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1));
|
||||
}
|
||||
|
||||
#else
|
||||
HRESULT hr = pD3D12Enc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1,
|
||||
&capEncoderSupportData1,
|
||||
sizeof(capEncoderSupportData1));
|
||||
#endif
|
||||
|
||||
if (FAILED(hr)) {
|
||||
debug_printf("CheckFeatureSupport D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1 failed with HR %x\n", (unsigned)hr);
|
||||
debug_printf("Falling back to check previous query version D3D12_FEATURE_VIDEO_ENCODER_SUPPORT...\n");
|
||||
|
||||
// D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 extends D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT
|
||||
// in a binary compatible way, so just cast it and try with the older query D3D12_FEATURE_VIDEO_ENCODER_SUPPORT
|
||||
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT * casted_down_cap_data = reinterpret_cast<D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT*>(&capEncoderSupportData1);
|
||||
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT * casted_down_cap_data = reinterpret_cast<D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT*>(&capEncoderSupportData);
|
||||
|
||||
//
|
||||
// Remove legacy query parameters for features not supported in older OS when using older OS support query
|
||||
|
|
@ -2057,7 +1996,7 @@ bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3
|
|||
d3d12_video_encoder_disable_rc_qualitylevels(
|
||||
pD3D12Enc->m_currentEncodeConfig.m_encoderRateControlDesc[pD3D12Enc->m_currentEncodeConfig.m_activeRateControlIndex]);
|
||||
|
||||
capEncoderSupportData1.RateControl = d3d12_video_encoder_get_current_rate_control_settings(pD3D12Enc);
|
||||
capEncoderSupportData.RateControl = d3d12_video_encoder_get_current_rate_control_settings(pD3D12Enc);
|
||||
|
||||
hr = pD3D12Enc->m_spD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT,
|
||||
casted_down_cap_data,
|
||||
|
|
@ -2072,18 +2011,17 @@ bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3
|
|||
// and having issues with encoder state/heap objects recreation
|
||||
if (pD3D12Enc->m_pD3D12Screen->vendor_id == 0x8086 /* HW_VENDOR_INTEL */) {
|
||||
// If IHV driver doesn't report reconfiguration, force doing the reconfiguration without object recreation
|
||||
if ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_RECONFIGURATION_AVAILABLE) == 0) {
|
||||
if ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_RECONFIGURATION_AVAILABLE) == 0) {
|
||||
pD3D12Enc->driver_workarounds |= d3d12_video_encoder_driver_workaround_rate_control_reconfig;
|
||||
capEncoderSupportData1.SupportFlags |= D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_RECONFIGURATION_AVAILABLE;
|
||||
capEncoderSupportData.SupportFlags |= D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_RECONFIGURATION_AVAILABLE;
|
||||
}
|
||||
}
|
||||
|
||||
pD3D12Enc->m_currentEncodeCapabilities.m_SupportFlags = capEncoderSupportData1.SupportFlags;
|
||||
pD3D12Enc->m_currentEncodeCapabilities.m_ValidationFlags = capEncoderSupportData1.ValidationFlags;
|
||||
pD3D12Enc->m_currentEncodeCapabilities.m_SupportFlags = capEncoderSupportData.SupportFlags;
|
||||
pD3D12Enc->m_currentEncodeCapabilities.m_ValidationFlags = capEncoderSupportData.ValidationFlags;
|
||||
|
||||
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
|
||||
if ((capEncoderSupportData1.DirtyRegions.MapSource == D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE) &&
|
||||
(capEncoderSupportData1.DirtyRegions.Enabled))
|
||||
if ((capEncoderSupportData.DirtyRegions.MapSource == D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE) &&
|
||||
(capEncoderSupportData.DirtyRegions.Enabled))
|
||||
{
|
||||
// Query specifics of staging resource for dirty regions
|
||||
pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc.MapInfo.capInputLayoutDirtyRegion =
|
||||
|
|
@ -2092,15 +2030,15 @@ bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3
|
|||
0u,
|
||||
// D3D12_VIDEO_ENCODER_INPUT_MAP_SESSION_INFO SessionInfo;
|
||||
{
|
||||
capEncoderSupportData1.Codec,
|
||||
capEncoderSupportData.Codec,
|
||||
d3d12_video_encoder_get_current_profile_desc(pD3D12Enc),
|
||||
d3d12_video_encoder_get_current_level_desc(pD3D12Enc),
|
||||
pD3D12Enc->m_currentEncodeConfig.m_encodeFormatInfo.Format,
|
||||
// D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_DESC
|
||||
pD3D12Enc->m_currentEncodeConfig.m_currentResolution,
|
||||
d3d12_video_encoder_get_current_codec_config_desc(pD3D12Enc),
|
||||
capEncoderSupportData1.SubregionFrameEncoding,
|
||||
capEncoderSupportData1.SubregionFrameEncodingData
|
||||
capEncoderSupportData.SubregionFrameEncoding,
|
||||
capEncoderSupportData.SubregionFrameEncodingData
|
||||
},
|
||||
// D3D12_VIDEO_ENCODER_INPUT_MAP_TYPE MapType;
|
||||
D3D12_VIDEO_ENCODER_INPUT_MAP_TYPE_DIRTY_REGIONS,
|
||||
|
|
@ -2120,8 +2058,8 @@ bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3
|
|||
}
|
||||
}
|
||||
|
||||
if ((capEncoderSupportData1.QPMap.MapSource == D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE) &&
|
||||
(capEncoderSupportData1.QPMap.Enabled))
|
||||
if ((capEncoderSupportData.QPMap.MapSource == D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE) &&
|
||||
(capEncoderSupportData.QPMap.Enabled))
|
||||
{
|
||||
// Query specifics of staging resource for QPMap regions
|
||||
pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.GPUInput.capInputLayoutQPMap =
|
||||
|
|
@ -2130,15 +2068,15 @@ bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3
|
|||
0u,
|
||||
// D3D12_VIDEO_ENCODER_INPUT_MAP_SESSION_INFO SessionInfo;
|
||||
{
|
||||
capEncoderSupportData1.Codec,
|
||||
capEncoderSupportData.Codec,
|
||||
d3d12_video_encoder_get_current_profile_desc(pD3D12Enc),
|
||||
d3d12_video_encoder_get_current_level_desc(pD3D12Enc),
|
||||
pD3D12Enc->m_currentEncodeConfig.m_encodeFormatInfo.Format,
|
||||
// D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_DESC
|
||||
pD3D12Enc->m_currentEncodeConfig.m_currentResolution,
|
||||
d3d12_video_encoder_get_current_codec_config_desc(pD3D12Enc),
|
||||
capEncoderSupportData1.SubregionFrameEncoding,
|
||||
capEncoderSupportData1.SubregionFrameEncodingData
|
||||
capEncoderSupportData.SubregionFrameEncoding,
|
||||
capEncoderSupportData.SubregionFrameEncodingData
|
||||
},
|
||||
// D3D12_VIDEO_ENCODER_INPUT_MAP_TYPE MapType;
|
||||
D3D12_VIDEO_ENCODER_INPUT_MAP_TYPE_QUANTIZATION_MATRIX,
|
||||
|
|
@ -2158,8 +2096,8 @@ bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3
}
}

if ((capEncoderSupportData1.MotionSearch.MapSource == D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE) &&
(capEncoderSupportData1.MotionSearch.Enabled))
if ((capEncoderSupportData.MotionSearch.MapSource == D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE) &&
(capEncoderSupportData.MotionSearch.Enabled))
{
// Query specifics of staging resource for move regions
pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapInfo.capInputLayoutMotionVectors =

@ -2168,15 +2106,15 @@ bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3
0u,
// D3D12_VIDEO_ENCODER_INPUT_MAP_SESSION_INFO SessionInfo;
{
capEncoderSupportData1.Codec,
capEncoderSupportData.Codec,
d3d12_video_encoder_get_current_profile_desc(pD3D12Enc),
d3d12_video_encoder_get_current_level_desc(pD3D12Enc),
pD3D12Enc->m_currentEncodeConfig.m_encodeFormatInfo.Format,
// D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_DESC
pD3D12Enc->m_currentEncodeConfig.m_currentResolution,
d3d12_video_encoder_get_current_codec_config_desc(pD3D12Enc),
capEncoderSupportData1.SubregionFrameEncoding,
capEncoderSupportData1.SubregionFrameEncodingData
capEncoderSupportData.SubregionFrameEncoding,
capEncoderSupportData.SubregionFrameEncodingData
},
// D3D12_VIDEO_ENCODER_INPUT_MAP_TYPE MapType;
D3D12_VIDEO_ENCODER_INPUT_MAP_TYPE_MOTION_VECTORS,
@ -2195,7 +2133,6 @@ bool d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3
return false;
}
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

return true;
}

@ -2294,12 +2231,10 @@ d3d12_video_encoder_update_output_stats_resources(struct d3d12_video_encoder *pD
struct pipe_resource* rcbitsmap,
struct pipe_resource* psnrmap)
{
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
pD3D12Enc->m_currentEncodeConfig.m_GPUQPStatsResource = d3d12_resource(qpmap);
pD3D12Enc->m_currentEncodeConfig.m_GPUSATDStatsResource = d3d12_resource(satdmap);
pD3D12Enc->m_currentEncodeConfig.m_GPURCBitAllocationStatsResource = d3d12_resource(rcbitsmap);
pD3D12Enc->m_currentEncodeConfig.m_GPUPSNRAllocationStatsResource = d3d12_resource(psnrmap);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
}

bool

@ -2372,14 +2307,12 @@ d3d12_video_encoder_update_current_encoder_config_state(struct d3d12_video_encod
} break;
}

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
// Set dirty region changes
if (memcmp(&pD3D12Enc->m_prevFrameEncodeConfig.m_DirtyRectsDesc,
&pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc,
sizeof(pD3D12Enc->m_currentEncodeConfig.m_DirtyRectsDesc)) != 0) {
pD3D12Enc->m_currentEncodeConfig.m_ConfigDirtyFlags |= d3d12_video_encoder_config_dirty_flag_dirty_regions;
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

return bCodecUpdatesSuccess;
}
@ -2390,10 +2323,8 @@ d3d12_video_encoder_create_command_objects(struct d3d12_video_encoder *pD3D12Enc
assert(pD3D12Enc->m_spD3D12VideoDevice);

D3D12_COMMAND_QUEUE_DESC commandQueueDesc = { D3D12_COMMAND_LIST_TYPE_VIDEO_ENCODE };
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
if (pD3D12Enc->m_pD3D12Screen->supports_dynamic_queue_priority)
commandQueueDesc.Flags |= D3D12_COMMAND_QUEUE_FLAG_ALLOW_DYNAMIC_PRIORITY;
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
HRESULT hr = pD3D12Enc->m_pD3D12Screen->dev->CreateCommandQueue(
&commandQueueDesc,
IID_PPV_ARGS(pD3D12Enc->m_spEncodeCommandQueue.GetAddressOf()));

@ -2590,8 +2521,6 @@ d3d12_video_encoder_create_encoder(struct pipe_context *context, const struct pi
PIPE_VIDEO_CAP_ENC_SLICED_NOTIFICATIONS);
d3d12_video_encoder_initialize_two_pass(pD3D12Enc, codec->two_pass);

#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )

if (pD3D12Ctx->priority_manager)
{
// Register queue with priority manager

@ -2603,8 +2532,6 @@ d3d12_video_encoder_create_encoder(struct pipe_context *context, const struct pi
}
}

#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )

return &pD3D12Enc->base;

failed:
@ -2630,7 +2557,6 @@ d3d12_video_encoder_prepare_output_buffers(struct d3d12_video_encoder *pD3D12Enc
pD3D12Enc->m_currentEncodeCapabilities.m_ResourceRequirementsCaps.PictureTargetResolution =
pD3D12Enc->m_currentEncodeConfig.m_currentResolution;

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
// Assume all stats supported by the driver will be required and use max allocation to avoid reallocating between frames
pD3D12Enc->m_currentEncodeCapabilities.m_ResourceRequirementsCaps.OptionalMetadata = D3D12_VIDEO_ENCODER_OPTIONAL_METADATA_ENABLE_FLAG_NONE;

@ -2676,12 +2602,6 @@ d3d12_video_encoder_prepare_output_buffers(struct d3d12_video_encoder *pD3D12Enc
casted_down_cap_data,
sizeof(*casted_down_cap_data));
}
#else
HRESULT hr = pD3D12Enc->m_spD3D12VideoDevice->CheckFeatureSupport(
D3D12_FEATURE_VIDEO_ENCODER_RESOURCE_REQUIREMENTS,
&pD3D12Enc->m_currentEncodeCapabilities.m_ResourceRequirementsCaps,
sizeof(pD3D12Enc->m_currentEncodeCapabilities.m_ResourceRequirementsCaps));
#endif

if (FAILED(hr)) {
debug_printf("CheckFeatureSupport failed with HR %x\n", (unsigned)hr);

@ -2753,7 +2673,6 @@ d3d12_video_encoder_prepare_input_buffers(struct d3d12_video_encoder *pD3D12Enc)
// and create them on demand (if the previous allocation is not big enough)

HRESULT hr = S_OK;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_HEAP_PROPERTIES Properties = CD3DX12_HEAP_PROPERTIES(D3D12_HEAP_TYPE_DEFAULT);
if (d3d12_video_encoder_is_dirty_regions_feature_enabled(pD3D12Enc, D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE))
{

@ -2823,7 +2742,6 @@ d3d12_video_encoder_prepare_input_buffers(struct d3d12_video_encoder *pD3D12Enc)
}
}
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
return SUCCEEDED(hr);
}
@ -2975,9 +2893,7 @@ d3d12_video_encoder_calculate_max_slices_count_in_output(
maxSlices = 1u;
} break;
case D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_BYTES_PER_SUBREGION:
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
case D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_AUTO:
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
{
maxSlices = MaxSubregionsNumberFromCaps;
} break;

@ -3522,7 +3438,7 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
}

// Update current frame pic params state after reconfiguring above.
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA currentPicParams =
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 currentPicParams =
d3d12_video_encoder_get_current_picture_param_settings(pD3D12Enc);

if (!pD3D12Enc->m_upDPBManager->get_current_frame_picture_control_data(currentPicParams)) {

@ -3567,17 +3483,16 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
}
#endif

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
ComPtr<ID3D12VideoEncodeCommandList4> spEncodeCommandList4;
if (SUCCEEDED(pD3D12Enc->m_spEncodeCommandList->QueryInterface(
IID_PPV_ARGS(spEncodeCommandList4.GetAddressOf())))) {

// Update current frame pic params state after reconfiguring above.
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 currentPicParams1 =
d3d12_video_encoder_get_current_picture_param_settings1(pD3D12Enc);
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 currentPicParams =
d3d12_video_encoder_get_current_picture_param_settings(pD3D12Enc);

if (!pD3D12Enc->m_upDPBManager->get_current_frame_picture_control_data1(currentPicParams1)) {
debug_printf("[d3d12_video_encoder_encode_bitstream] get_current_frame_picture_control_data1 failed!\n");
if (!pD3D12Enc->m_upDPBManager->get_current_frame_picture_control_data(currentPicParams)) {
debug_printf("[d3d12_video_encoder_encode_bitstream] get_current_frame_picture_control_data failed!\n");
pD3D12Enc->m_inflightResourcesPool[d3d12_video_encoder_pool_current_index(pD3D12Enc)].encode_result = PIPE_VIDEO_FEEDBACK_METADATA_ENCODE_FLAG_FAILED;
pD3D12Enc->m_spEncodedFrameMetadata[d3d12_video_encoder_metadata_current_index(pD3D12Enc)].encode_result = PIPE_VIDEO_FEEDBACK_METADATA_ENCODE_FLAG_FAILED;
assert(false);
@ -3725,7 +3640,7 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
ResolveInputData.MotionVectors.pMotionVectorMapsSubresources = pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapInfo.pMotionVectorMapsSubresources;
ResolveInputData.MotionVectors.pMotionVectorMapsMetadataSubresources = pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapInfo.pMotionVectorMapsMetadataSubresources;
ResolveInputData.MotionVectors.MotionUnitPrecision = pD3D12Enc->m_currentEncodeConfig.m_MoveRectsDesc.MapInfo.MotionUnitPrecision;
ResolveInputData.MotionVectors.PictureControlConfiguration = currentPicParams1;
ResolveInputData.MotionVectors.PictureControlConfiguration = currentPicParams;

D3D12_VIDEO_ENCODER_RESOLVE_INPUT_PARAM_LAYOUT_INPUT_ARGUMENTS resolveInputParamLayoutInput =
{

@ -3899,7 +3814,7 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_FLAGS Flags;
picCtrlFlags,
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 PictureControlCodecData;
currentPicParams1,
currentPicParams,
// D3D12_VIDEO_ENCODE_REFERENCE_FRAMES ReferenceFrames;
referenceFramesDescriptor,
// D3D12_VIDEO_ENCODER_FRAME_MOTION_VECTORS MotionVectors;

@ -3984,7 +3899,7 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].ppResolvedSubregionSizes.resize(num_slice_objects, 0u);
pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].ppResolvedSubregionOffsets.resize(num_slice_objects, 0u);
D3D12_HEAP_PROPERTIES Properties = CD3DX12_HEAP_PROPERTIES(D3D12_HEAP_TYPE_DEFAULT);
HRESULT hr = S_OK;
[[maybe_unused]] HRESULT hr = S_OK;
pSlicedEncodingExtraBarriers.resize(num_slice_objects);
for (uint32_t i = 0; i < num_slice_objects;i++)
{

@ -4030,10 +3945,6 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
hr = pD3D12Enc->m_pD3D12Screen->dev->CreateFence(0, D3D12_FENCE_FLAG_NONE, IID_PPV_ARGS(&pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].pspSubregionFences[i]));
pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].ppSubregionFences[i] = pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].pspSubregionFences[i].Get();

memset(&pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].pSubregionPipeFences[i],
0,
sizeof(pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].pSubregionPipeFences[i]));

pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].pSubregionPipeFences[i].reset(
d3d12_create_fence_raw(pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].pspSubregionFences[i].Get(),
pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].ppSubregionFenceValues[i]));
@ -4253,8 +4164,9 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
pTwoPassExtraBarriers.data());
}
else
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA currentPicParamsLegacy =
d3d12_video_encoder_get_current_picture_param_settings_legacy(pD3D12Enc);
const D3D12_VIDEO_ENCODER_ENCODEFRAME_INPUT_ARGUMENTS inputStreamArguments = {
// D3D12_VIDEO_ENCODER_SEQUENCE_CONTROL_DESC
{ // D3D12_VIDEO_ENCODER_SEQUENCE_CONTROL_FLAGS

@ -4273,7 +4185,7 @@ d3d12_video_encoder_encode_bitstream_impl(struct pipe_video_codec *codec,
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_FLAGS Flags;
picCtrlFlags,
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA PictureControlCodecData;
currentPicParams,
currentPicParamsLegacy,
// D3D12_VIDEO_ENCODE_REFERENCE_FRAMES ReferenceFrames;
referenceFramesDescriptor
},

@ -4482,9 +4394,7 @@ d3d12_video_encoder_get_feedback(struct pipe_video_codec *codec,
// Re-pack slices with any extra slice headers
// if we are in full frame notification mode (otherwise each slice buffer packs independently)
//
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
if (pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot].SubregionNotificationMode == D3D12_VIDEO_ENCODER_COMPRESSED_BITSTREAM_NOTIFICATION_MODE_FULL_FRAME)
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
{
// Only repack if any slice has any headers to write
uint32_t num_slice_headers = 0u;
@ -4791,9 +4701,7 @@ d3d12_video_encoder_extract_encode_metadata(
reinterpret_cast<D3D12_VIDEO_ENCODER_FRAME_SUBREGION_METADATA *>(reinterpret_cast<uint8_t *>(pMetadataBufferSrc) +
encoderMetadataSize);

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
if (raw_metadata.SubregionNotificationMode == D3D12_VIDEO_ENCODER_COMPRESSED_BITSTREAM_NOTIFICATION_MODE_FULL_FRAME)
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
{
// Copy fields into D3D12_VIDEO_ENCODER_FRAME_SUBREGION_METADATA
assert(parsedMetadata.WrittenSubregionsCount < SIZE_MAX);

@ -4804,7 +4712,6 @@ d3d12_video_encoder_extract_encode_metadata(
pSubregionsMetadata[sliceIdx].bStartOffset = pFrameSubregionMetadata[sliceIdx].bStartOffset;
}
}
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
else if (raw_metadata.SubregionNotificationMode == D3D12_VIDEO_ENCODER_COMPRESSED_BITSTREAM_NOTIFICATION_MODE_SUBREGIONS) {
// Driver metadata doesn't have the subregions nor EncodedBitstreamWrittenBytesCount info on this case, let's get them from d3d12_video_encoder_get_slice_bitstream_data instead
parsedMetadata.EncodedBitstreamWrittenBytesCount = 0u;

@ -4835,7 +4742,6 @@ d3d12_video_encoder_extract_encode_metadata(
parsedMetadata.EncodedBitstreamWrittenBytesCount += pSubregionsMetadata[sliceIdx].bSize;
}
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

// Unmap the buffer tmp storage
pipe_buffer_unmap(pD3D12Enc->base.context, mapTransfer);
@ -175,11 +175,7 @@ struct D3D12EncodeCapabilities

D3D12_VIDEO_ENCODER_SUPPORT_FLAGS m_SupportFlags = {};
D3D12_VIDEO_ENCODER_VALIDATION_FLAGS m_ValidationFlags = {};
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLUTION_SUPPORT_LIMITS1 m_currentResolutionSupportCaps = {};
#else
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLUTION_SUPPORT_LIMITS m_currentResolutionSupportCaps = {};
#endif
union
{
D3D12_VIDEO_ENCODER_PROFILE_H264 m_H264Profile;

@ -208,11 +204,7 @@ struct D3D12EncodeCapabilities
// The maximum number of slices that the output of the current frame to be encoded will contain
uint32_t m_MaxSlicesInOutput = 0;

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOURCE_REQUIREMENTS1 m_ResourceRequirementsCaps = {};
#else
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOURCE_REQUIREMENTS m_ResourceRequirementsCaps = {};
#endif
};

struct D3D12EncodeRateControlState
@ -325,11 +317,7 @@ struct D3D12EncodeConfiguration
union
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_H264 m_H264PicData;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 m_HEVCPicData;
#else
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC m_HEVCPicData;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_CODEC_DATA m_AV1PicData;
} m_encoderPicParamsDesc = {};

@ -351,10 +339,9 @@ struct D3D12EncodeConfiguration

bool m_bUsedAsReference; // Set if frame will be used as reference frame

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
struct{
D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE MapSource;
union {
struct {
// D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_CPU_BUFFER
D3D12_VIDEO_ENCODER_DIRTY_RECT_INFO RectsInfo;
// D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE
@ -393,23 +380,21 @@ struct D3D12EncodeConfiguration
} m_QuantizationMatrixDesc = {};
struct{
D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE MapSource;
struct { // union doesn't play well with std::vector
// D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_CPU_BUFFER
D3D12_VIDEO_ENCODER_MOVEREGION_INFO RectsInfo;
// D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE
struct
{
D3D12_VIDEO_ENCODER_FRAME_MOTION_SEARCH_MODE_CONFIG MotionSearchModeConfiguration;
UINT NumHintsPerPixel;
std::vector<ID3D12Resource*> ppMotionVectorMaps;
UINT* pMotionVectorMapsSubresources;
std::vector<ID3D12Resource*> ppMotionVectorMapsMetadata;
UINT* pMotionVectorMapsMetadataSubresources;
D3D12_VIDEO_ENCODER_FRAME_INPUT_MOTION_UNIT_PRECISION MotionUnitPrecision;
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 PictureControlConfiguration;
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLVE_INPUT_PARAM_LAYOUT capInputLayoutMotionVectors;
} MapInfo;
};
// D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_CPU_BUFFER
D3D12_VIDEO_ENCODER_MOVEREGION_INFO RectsInfo;
// D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE
struct
{
D3D12_VIDEO_ENCODER_FRAME_MOTION_SEARCH_MODE_CONFIG MotionSearchModeConfiguration;
UINT NumHintsPerPixel;
std::vector<ID3D12Resource*> ppMotionVectorMaps;
UINT* pMotionVectorMapsSubresources;
std::vector<ID3D12Resource*> ppMotionVectorMapsMetadata;
UINT* pMotionVectorMapsMetadataSubresources;
D3D12_VIDEO_ENCODER_FRAME_INPUT_MOTION_UNIT_PRECISION MotionUnitPrecision;
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 PictureControlConfiguration;
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLVE_INPUT_PARAM_LAYOUT capInputLayoutMotionVectors;
} MapInfo;
} m_MoveRectsDesc = {};
std::vector<RECT> m_DirtyRectsArray;
std::vector<D3D12_VIDEO_ENCODER_MOVE_RECT> m_MoveRectsArray;

@ -461,7 +446,6 @@ struct D3D12EncodeConfiguration
D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE FrameAnalysisReconstructedPictureOutput;

} m_TwoPassEncodeDesc = {};
#endif
};

struct EncodedBitstreamResolvedMetadata

@ -516,9 +500,7 @@ struct EncodedBitstreamResolvedMetadata
* stream from EncodeFrame.
*/
std::vector<ComPtr<ID3D12Resource>> spStagingBitstreams;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_COMPRESSED_BITSTREAM_NOTIFICATION_MODE SubregionNotificationMode;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
std::vector<ComPtr<ID3D12Resource>> pspSubregionSizes;
std::vector<ComPtr<ID3D12Resource>> pspSubregionOffsets;
std::vector<ComPtr<ID3D12Fence>> pspSubregionFences;
@ -673,12 +655,10 @@ bool
d3d12_video_encoder_reconfigure_encoder_objects(struct d3d12_video_encoder *pD3D12Enc,
struct pipe_video_buffer * srcTexture,
struct pipe_picture_desc * picture);
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA
d3d12_video_encoder_get_current_picture_param_settings(struct d3d12_video_encoder *pD3D12Enc);
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1
d3d12_video_encoder_get_current_picture_param_settings1(struct d3d12_video_encoder *pD3D12Enc);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
d3d12_video_encoder_get_current_picture_param_settings(struct d3d12_video_encoder *pD3D12Enc);
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA
d3d12_video_encoder_get_current_picture_param_settings_legacy(struct d3d12_video_encoder *pD3D12Enc);
D3D12_VIDEO_ENCODER_LEVEL_SETTING
d3d12_video_encoder_get_current_level_desc(struct d3d12_video_encoder *pD3D12Enc);
D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION

@ -730,18 +710,10 @@ d3d12_video_encoder_get_current_codec(struct d3d12_video_encoder *pD3D12Enc);

bool
d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(struct d3d12_video_encoder *pD3D12Enc,
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 &capEncoderSupportData);
#else
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 &capEncoderSupportData);
#endif
bool
d3d12_video_encoder_query_d3d12_driver_caps(struct d3d12_video_encoder *pD3D12Enc,
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 &capEncoderSupportData);
#else
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 &capEncoderSupportData);
#endif
bool
d3d12_video_encoder_check_subregion_mode_support(struct d3d12_video_encoder *pD3D12Enc,
D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE requestedSlicesMode);
@ -1183,17 +1183,13 @@ d3d12_video_encoder_update_current_encoder_config_state_av1(struct d3d12_video_e

// Will call for d3d12 driver support based on the initial requested (non codec specific) features, then
// try to fallback if any of them is not supported and return the negotiated d3d12 settings
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData1 = {};
#else
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 capEncoderSupportData1 = {};
#endif
if (!d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(pD3D12Enc, capEncoderSupportData1)) {
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData2 = {};
if (!d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(pD3D12Enc, capEncoderSupportData2)) {
debug_printf("[d3d12_video_encoder_av1] After negotiating caps, D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1 "
"arguments are not supported - "
"ValidationFlags: 0x%x - SupportFlags: 0x%x\n",
capEncoderSupportData1.ValidationFlags,
capEncoderSupportData1.SupportFlags);
capEncoderSupportData2.ValidationFlags,
capEncoderSupportData2.SupportFlags);
return false;
}

@ -1612,7 +1608,7 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
}
}

D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA picParams = {};
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 picParams = {};
picParams.pAV1PicData = pAV1PicData;
picParams.DataSize = sizeof(*pAV1PicData);
pD3D12Enc->m_upDPBManager->begin_frame(picParams, bUsedAsReference, picture);

@ -1633,7 +1629,6 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
// pD3D12Enc->m_spEncodedFrameMetadata[current_metadata_slot]
// .m_CodecSpecificData.AV1HeadersInfo.temporal_delim_rendered = pAV1Pic->temporal_delim_rendered;

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
if (pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.AppRequested)
{
// Use 16 bit qpmap array for AV1 picparams (-255, 255 range and int16_t pRateControlQPMap type)

@ -1657,7 +1652,6 @@ d3d12_video_encoder_update_current_frame_pic_params_info_av1(struct d3d12_video_
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_av1] Using user-provided CPU 16-bit QP map buffer with %d entries ptr = %p\n",
pAV1PicData->QPMapValuesCount, pAV1PicData->pRateControlQPMap);
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
}

void
@ -380,7 +380,6 @@ d3d12_video_encoder_update_current_frame_pic_params_info_h264(struct d3d12_video
if (h264Pic->picture_type == PIPE_H2645_ENC_PICTURE_TYPE_B)
pH264PicData->List1ReferenceFramesCount = h264Pic->num_ref_idx_l1_active_minus1 + 1;

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
if (pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.AppRequested)
{
// Use 8 bit qpmap array for H264 picparams (-51, 51 range and int8_t pRateControlQPMap type)

@ -405,9 +404,8 @@ d3d12_video_encoder_update_current_frame_pic_params_info_h264(struct d3d12_video
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_h264] Using user-provided CPU 8-bit QP map buffer with %d entries ptr = %p\n",
pH264PicData->QPMapValuesCount, pH264PicData->pRateControlQPMap);
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA picParams = {};
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 picParams = {};
picParams.pH264PicData = pH264PicData;
picParams.DataSize = sizeof(*pH264PicData);
pD3D12Enc->m_upDPBManager->begin_frame(picParams, bUsedAsReference, picture);

@ -537,7 +535,6 @@ d3d12_video_encoder_negotiate_current_h264_slices_configuration(struct d3d12_vid
return false;
}
}
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
else if(picture->slice_mode == PIPE_VIDEO_SLICE_MODE_AUTO) {
if (d3d12_video_encoder_check_subregion_mode_support(
pD3D12Enc,

@ -553,7 +550,6 @@ d3d12_video_encoder_negotiate_current_h264_slices_configuration(struct d3d12_vid
return false;
}
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
else {
requestedSlicesMode = D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_FULL_FRAME;
requestedSlicesConfig.NumberOfSlicesPerFrame = 1;

@ -1073,17 +1069,13 @@ d3d12_video_encoder_update_current_encoder_config_state_h264(struct d3d12_video_

// Will call for d3d12 driver support based on the initial requested features, then
// try to fallback if any of them is not supported and return the negotiated d3d12 settings
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData1 = {};
#else
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 capEncoderSupportData1 = {};
#endif
if (!d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(pD3D12Enc, capEncoderSupportData1)) {
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData2 = {};
if (!d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(pD3D12Enc, capEncoderSupportData2)) {
debug_printf("[d3d12_video_encoder_h264] After negotiating caps, D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1 "
"arguments are not supported - "
"ValidationFlags: 0x%x - SupportFlags: 0x%x\n",
capEncoderSupportData1.ValidationFlags,
capEncoderSupportData1.SupportFlags);
capEncoderSupportData2.ValidationFlags,
capEncoderSupportData2.SupportFlags);
return false;
}
@ -1198,7 +1190,7 @@ uint32_t
d3d12_video_encoder_build_codec_headers_h264(struct d3d12_video_encoder *pD3D12Enc,
std::vector<uint64_t> &pWrittenCodecUnitsSizes)
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA currentPicParams =
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 currentPicParams =
d3d12_video_encoder_get_current_picture_param_settings(pD3D12Enc);

auto levelDesc = d3d12_video_encoder_get_current_level_desc(pD3D12Enc);

@ -348,11 +348,7 @@ void
d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video_encoder *pD3D12Enc,
struct pipe_video_buffer *srcTexture,
struct pipe_picture_desc *picture,
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 *pHEVCPicData,
#else
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC *pHEVCPicData,
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool &bUsedAsReference)
{
struct pipe_h265_enc_picture_desc *hevcPic = (struct pipe_h265_enc_picture_desc *) picture;

@ -369,7 +365,6 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
pHEVCPicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_REQUEST_NUM_REF_IDX_ACTIVE_OVERRIDE_FLAG_SLICE;
}

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
if ((hevcPic->base.profile == PIPE_VIDEO_PROFILE_HEVC_MAIN_444) ||
(hevcPic->base.profile == PIPE_VIDEO_PROFILE_HEVC_MAIN10_444) ||
(hevcPic->base.profile == PIPE_VIDEO_PROFILE_HEVC_MAIN_422) ||
@ -533,7 +528,6 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
}
}
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

pHEVCPicData->slice_pic_parameter_set_id = pHEVCBitstreamBuilder->get_active_pps().pps_pic_parameter_set_id;

@ -547,10 +541,8 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
// These need to be set here so they're available for SPS/PPS header building (reference manager updates after that, for slice header params)
//
pHEVCPicData->TemporalLayerIndex = hevcPic->pic.temporal_id;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
pHEVCPicData->num_ref_idx_l0_active_minus1 = 0;
pHEVCPicData->num_ref_idx_l1_active_minus1 = 0;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
pHEVCPicData->List0ReferenceFramesCount = 0;
pHEVCPicData->List1ReferenceFramesCount = 0;
if ((hevcPic->picture_type == PIPE_H2645_ENC_PICTURE_TYPE_P) ||

@ -558,7 +550,6 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
{
// Assume legacy behavior for now and override below if new SDK/interfaces are used
pHEVCPicData->List0ReferenceFramesCount = hevcPic->num_ref_idx_l0_active_minus1 + 1;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
// Only set pHEVCPicData->num_ref_idx_l0_active_minus1/List0ReferenceFramesCount
// differently on the newer interfaces that support it
// Otherwise fallback to the legacy behavior using List0ReferenceFramesCount

@ -570,14 +561,12 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
pHEVCPicData->num_ref_idx_l0_active_minus1 = hevcPic->num_ref_idx_l0_active_minus1;
pHEVCPicData->List0ReferenceFramesCount = ref_list0_count;
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
}

if (hevcPic->picture_type == PIPE_H2645_ENC_PICTURE_TYPE_B)
{
// Assume legacy behavior for now and override below if new SDK/interfaces are used
pHEVCPicData->List1ReferenceFramesCount = hevcPic->num_ref_idx_l1_active_minus1 + 1;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
// Only set pHEVCPicData->num_ref_idx_l1_active_minus1/List1ReferenceFramesCount
// differently on the newer interfaces that support it
// Otherwise fallback to the legacy behavior using List1ReferenceFramesCount
@ -589,13 +578,11 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
pHEVCPicData->num_ref_idx_l1_active_minus1 = hevcPic->num_ref_idx_l1_active_minus1;
pHEVCPicData->List1ReferenceFramesCount = ref_list1_count;
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
}

if ((pD3D12Enc->m_currentEncodeConfig.m_encoderCodecSpecificConfigDesc.m_HEVCConfig.ConfigurationFlags
& D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_HEVC_FLAG_ALLOW_REQUEST_INTRA_CONSTRAINED_SLICES) != 0)
pHEVCPicData->Flags |= D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC_FLAG_REQUEST_INTRA_CONSTRAINED_SLICES;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
if (pD3D12Enc->m_currentEncodeConfig.m_QuantizationMatrixDesc.CPUInput.AppRequested)
{
// Use 8-bit QP map entries (int8), but clamp to bit-depth dependent HEVC delta QP range (int8_t pRateControlQPMap type)

@ -624,24 +611,12 @@ d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video
debug_printf("[d3d12_video_encoder_update_current_frame_pic_params_info_hevc] Using user-provided CPU 8-bit QP map buffer with %d entries ptr = %p\n",
pHEVCPicData->QPMapValuesCount, pHEVCPicData->pRateControlQPMap);
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

// TODO: Here call begin_frame1 and get_current_frame_picture_control_data1 when applicable

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 picParams1 = {};
picParams1.pHEVCPicData = pHEVCPicData;
picParams1.DataSize = sizeof(*pHEVCPicData);
pD3D12Enc->m_upDPBManager->begin_frame1(picParams1, bUsedAsReference, picture);
pD3D12Enc->m_upDPBManager->get_current_frame_picture_control_data1(picParams1);
#else
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA picParams = {};
picParams.pHEVCPicData = pHEVCPicData;
picParams.DataSize = sizeof(*pHEVCPicData);
pD3D12Enc->m_upDPBManager->begin_frame(picParams, bUsedAsReference, picture);
pD3D12Enc->m_upDPBManager->get_current_frame_picture_control_data(picParams);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
pD3D12Enc->m_upDPBManager->begin_frame(picParams1, bUsedAsReference, picture);
pD3D12Enc->m_upDPBManager->get_current_frame_picture_control_data(picParams1);

// Save state snapshot from record time to resolve headers at get_feedback time
size_t current_metadata_slot = static_cast<size_t>(pD3D12Enc->m_fenceValue % pD3D12Enc->m_MaxMetadataBuffersCount);
@ -763,7 +738,6 @@ d3d12_video_encoder_negotiate_current_hevc_slices_configuration(struct d3d12_vid
return false;
}
}
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
else if(picture->slice_mode == PIPE_VIDEO_SLICE_MODE_AUTO) {
if (d3d12_video_encoder_check_subregion_mode_support(
pD3D12Enc,

@ -779,7 +753,6 @@ d3d12_video_encoder_negotiate_current_hevc_slices_configuration(struct d3d12_vid
return false;
}
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
else {
requestedSlicesMode = D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_FULL_FRAME;
requestedSlicesConfig.NumberOfSlicesPerFrame = 1;

@ -1388,11 +1361,7 @@ d3d12_video_encoder_update_current_encoder_config_state_hevc(struct d3d12_video_

// Will call for d3d12 driver support based on the initial requested features, then
// try to fallback if any of them is not supported and return the negotiated d3d12 settings
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData1 = {};
#else
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 capEncoderSupportData1 = {};
#endif
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData2 = {};
// Get max number of slices per frame supported
if (hevcPic->num_slice_descriptors > 1)
pD3D12Enc->m_currentEncodeConfig.m_encoderSliceConfigMode =

@ -1407,12 +1376,12 @@ d3d12_video_encoder_update_current_encoder_config_state_hevc(struct d3d12_video_
return false;
}

if (!d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(pD3D12Enc, capEncoderSupportData1)) {
debug_printf("[d3d12_video_encoder_hevc] After negotiating caps, D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1 "
if (!d3d12_video_encoder_negotiate_requested_features_and_d3d12_driver_caps(pD3D12Enc, capEncoderSupportData2)) {
debug_printf("[d3d12_video_encoder_hevc] After negotiating caps, D3D12_FEATURE_VIDEO_ENCODER_SUPPORT2 "
"arguments are not supported - "
"ValidationFlags: 0x%x - SupportFlags: 0x%x\n",
capEncoderSupportData1.ValidationFlags,
capEncoderSupportData1.SupportFlags);
capEncoderSupportData2.ValidationFlags,
capEncoderSupportData2.SupportFlags);
return false;
}
@ -1558,7 +1527,7 @@ uint32_t
d3d12_video_encoder_build_codec_headers_hevc(struct d3d12_video_encoder *pD3D12Enc,
std::vector<uint64_t> &pWrittenCodecUnitsSizes)
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA currentPicParams =
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 currentPicParams =
d3d12_video_encoder_get_current_picture_param_settings(pD3D12Enc);

auto profDesc = d3d12_video_encoder_get_current_profile_desc(pD3D12Enc);

@ -52,12 +52,8 @@ void
d3d12_video_encoder_update_current_frame_pic_params_info_hevc(struct d3d12_video_encoder *pD3D12Enc,
struct pipe_video_buffer * srcTexture,
struct pipe_picture_desc * picture,
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 *pHEVCPicData,
#else
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC *pHEVCPicData,
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool &bUsedAsReference);
bool &bUsedAsReference);
uint32_t
d3d12_video_encoder_build_codec_headers_hevc(struct d3d12_video_encoder *pD3D12Enc,
std::vector<uint64_t> &pWrittenCodecUnitsSizes);

@ -587,7 +587,7 @@ d3d12_video_bitstream_builder_hevc::build_pps(const struct pipe_h265_enc_pic_par
const HevcSeqParameterSet& parentSPS,
uint8_t pic_parameter_set_id,
const D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_HEVC& codecConfig,
const D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC& pictureControl,
const D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2& pictureControl,
std::vector<BYTE> &headerBitstream,
std::vector<BYTE>::iterator placingPositionStart,
size_t &writtenBytes)

@ -61,7 +61,7 @@ class d3d12_video_bitstream_builder_hevc : public d3d12_video_bitstream_builder_
const HevcSeqParameterSet& parentSPS,
uint8_t pic_parameter_set_id,
const D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_HEVC& codecConfig,
const D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC& pictureControl,
const D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2& pictureControl,
std::vector<BYTE> &headerBitstream,
std::vector<BYTE>::iterator placingPositionStart,
size_t &writtenBytes);
@ -30,16 +30,10 @@
class d3d12_video_encoder_references_manager_interface
{
public:
virtual void begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA, bool bUsedAsReference, struct pipe_picture_desc* picture) = 0;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
virtual void begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1, bool bUsedAsReference, struct pipe_picture_desc* picture) = 0;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
virtual void begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1, bool bUsedAsReference, struct pipe_picture_desc* picture) = 0;
virtual void end_frame() = 0;
virtual D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE get_current_frame_recon_pic_output_allocation() = 0;
virtual bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation) = 0;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
virtual bool get_current_frame_picture_control_data1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation) = 0;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
virtual bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation) = 0;
virtual bool is_current_frame_used_as_reference() = 0;
virtual D3D12_VIDEO_ENCODE_REFERENCE_FRAMES get_current_reference_frames() = 0;
virtual ~d3d12_video_encoder_references_manager_interface()
@ -76,30 +76,12 @@ d3d12_video_encoder_references_manager_av1::is_current_frame_used_as_reference()
return m_isCurrentFrameUsedAsReference;
}

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void
d3d12_video_encoder_references_manager_av1::begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
d3d12_video_encoder_references_manager_av1::begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_CurrentFramePicParams = *curFrameData.pAV1PicData;
begin_frame_impl(bUsedAsReference, picture);
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

void
d3d12_video_encoder_references_manager_av1::begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_CurrentFramePicParams = *curFrameData.pAV1PicData;
begin_frame_impl(bUsedAsReference, picture);
}

void
d3d12_video_encoder_references_manager_av1::begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_isCurrentFrameUsedAsReference = bUsedAsReference;

if (m_CurrentFramePicParams.FrameType == D3D12_VIDEO_ENCODER_AV1_FRAME_TYPE_KEY_FRAME)
@ -312,24 +294,9 @@ d3d12_video_encoder_references_manager_av1::get_dpb_physical_slot_refcount_from_
return refCount;
}

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool
d3d12_video_encoder_references_manager_av1::get_current_frame_picture_control_data1(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation)
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA picData = {};
picData.DataSize = codecAllocation.DataSize;
picData.pAV1PicData = codecAllocation.pAV1PicData;
bool res = get_current_frame_picture_control_data(picData);
codecAllocation.DataSize = picData.DataSize;
codecAllocation.pAV1PicData = picData.pAV1PicData;
return res;
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

bool
d3d12_video_encoder_references_manager_av1::get_current_frame_picture_control_data(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation)
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation)
{
assert(m_CurrentFrameReferencesData.pVirtualDPBEntries.size() == NUM_REF_FRAMES);
assert(codecAllocation.DataSize == sizeof(D3D12_VIDEO_ENCODER_AV1_PICTURE_CONTROL_CODEC_DATA));
@ -32,17 +32,11 @@ class d3d12_video_encoder_references_manager_av1 : public d3d12_video_encoder_re
{
public:
void end_frame();
void begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
void begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture);
D3D12_VIDEO_ENCODER_RECONSTRUCTED_PICTURE get_current_frame_recon_pic_output_allocation();
bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation);
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture);
bool get_current_frame_picture_control_data1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation);
bool is_current_frame_used_as_reference();
D3D12_VIDEO_ENCODE_REFERENCE_FRAMES get_current_reference_frames();

@ -64,8 +58,6 @@ class d3d12_video_encoder_references_manager_av1 : public d3d12_video_encoder_re
void print_virtual_dpb_entries();
void print_physical_resource_references();
void print_ref_frame_idx();
void begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture);

// Class members
@ -30,24 +30,9 @@

using namespace std;

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool
d3d12_video_encoder_references_manager_h264::get_current_frame_picture_control_data1(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation)
{
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA picData = {};
picData.DataSize = codecAllocation.DataSize;
picData.pH264PicData = codecAllocation.pH264PicData;
bool res = get_current_frame_picture_control_data(picData);
codecAllocation.DataSize = picData.DataSize;
codecAllocation.pH264PicData = picData.pH264PicData;
return res;
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

bool
d3d12_video_encoder_references_manager_h264::get_current_frame_picture_control_data(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation)
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation)
{
assert(codecAllocation.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_H264));
if (codecAllocation.DataSize != sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_H264))

@ -289,30 +274,12 @@ d3d12_video_encoder_convert_frame_type_h264(enum pipe_h2645_enc_picture_type pic
}
}

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void
d3d12_video_encoder_references_manager_h264::begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_curFrameState = *curFrameData.pH264PicData;
begin_frame_impl(bUsedAsReference, picture);
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

void
d3d12_video_encoder_references_manager_h264::begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
d3d12_video_encoder_references_manager_h264::begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_curFrameState = *curFrameData.pH264PicData;
begin_frame_impl(bUsedAsReference, picture);
}

void
d3d12_video_encoder_references_manager_h264::begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_isCurrentFrameUsedAsReference = bUsedAsReference;

struct pipe_h264_enc_picture_desc *h264Pic = (struct pipe_h264_enc_picture_desc *) picture;
@ -30,16 +30,10 @@
class d3d12_video_encoder_references_manager_h264 : public d3d12_video_encoder_references_manager_interface
{
public:
void begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
void begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture);
bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation);
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture);
bool get_current_frame_picture_control_data1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation);

D3D12_VIDEO_ENCODE_REFERENCE_FRAMES get_current_reference_frames();

@ -64,8 +58,6 @@ class d3d12_video_encoder_references_manager_h264 : public d3d12_video_encoder_r
void print_dpb();
void print_l0_l1_lists();
void print_mmco_lists();
void begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture);

// Class members
struct d3d12_video_dpb
@ -31,23 +31,16 @@

using namespace std;

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool
d3d12_video_encoder_references_manager_hevc::get_current_frame_picture_control_data1(
d3d12_video_encoder_references_manager_hevc::get_current_frame_picture_control_data(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation)
{
assert(codecAllocation.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2));
memcpy(codecAllocation.pHEVCPicData, &m_curFrameState, sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2));
return true;
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
if (codecAllocation.DataSize != sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2))
return false;

*codecAllocation.pHEVCPicData = m_curFrameState;

bool
d3d12_video_encoder_references_manager_hevc::get_current_frame_picture_control_data(
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation)
{
assert(codecAllocation.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC));
memcpy(codecAllocation.pHEVCPicData, &m_curFrameState, sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC));
return true;
}

@ -253,33 +246,12 @@ d3d12_video_encoder_convert_frame_type_hevc(enum pipe_h2645_enc_picture_type pic
}
}

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void
d3d12_video_encoder_references_manager_hevc::begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
assert(curFrameData.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2));
memcpy(&m_curFrameState, curFrameData.pHEVCPicData, sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2));
begin_frame_impl(bUsedAsReference, picture);
}
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

void
d3d12_video_encoder_references_manager_hevc::begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
d3d12_video_encoder_references_manager_hevc::begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
assert(curFrameData.DataSize == sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC));
memcpy(&m_curFrameState, curFrameData.pHEVCPicData, sizeof(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC));
memset(((uint8_t*)(&m_curFrameState) + curFrameData.DataSize), 0, sizeof(m_curFrameState) - curFrameData.DataSize);
begin_frame_impl(bUsedAsReference, picture);
}

void
d3d12_video_encoder_references_manager_hevc::begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture)
{
m_curFrameState = *curFrameData.pHEVCPicData;
m_isCurrentFrameUsedAsReference = bUsedAsReference;

struct pipe_h265_enc_picture_desc *hevcPic = (struct pipe_h265_enc_picture_desc *) picture;
@ -30,16 +30,10 @@
class d3d12_video_encoder_references_manager_hevc : public d3d12_video_encoder_references_manager_interface
{
public:
void begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA curFrameData,
void begin_frame(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture);
bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA &codecAllocation);
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
void begin_frame1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 curFrameData,
bool bUsedAsReference,
struct pipe_picture_desc *picture);
bool get_current_frame_picture_control_data1(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation);
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool get_current_frame_picture_control_data(D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA1 &codecAllocation);
D3D12_VIDEO_ENCODE_REFERENCE_FRAMES get_current_reference_frames();

bool is_current_frame_used_as_reference()

@ -64,8 +58,6 @@ class d3d12_video_encoder_references_manager_hevc : public d3d12_video_encoder_r
void update_fifo_dpb_push_front_cur_recon_pic();
void print_dpb();
void print_l0_l1_lists();
void begin_frame_impl(bool bUsedAsReference,
struct pipe_picture_desc *picture);

// Class members
struct d3d12_video_dpb

@ -88,11 +80,7 @@ class d3d12_video_encoder_references_manager_hevc : public d3d12_video_encoder_r
current_frame_references_data m_CurrentFrameReferencesData;

bool m_isCurrentFrameUsedAsReference = false;
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC2 m_curFrameState = {};
#else
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_CODEC_DATA_HEVC m_curFrameState = {};
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
bool m_fArrayOfTextures = false;
};
@ -306,8 +306,6 @@ d3d12_video_processor_destroy(struct pipe_video_codec * codec)
|
|||
d3d12_video_processor_sync_completion(codec, curBatchFence, OS_TIMEOUT_INFINITE);
|
||||
}
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
struct d3d12_context* ctx = d3d12_context(pD3D12Proc->base.context);
|
||||
if (ctx->priority_manager)
|
||||
{
|
||||
|
|
@ -318,8 +316,6 @@ d3d12_video_processor_destroy(struct pipe_video_codec * codec)
|
|||
}
|
||||
}
|
||||
|
||||
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
// Call dtor to make ComPtr work
|
||||
delete pD3D12Proc;
|
||||
}
|
||||
|
|
@ -488,8 +484,6 @@ d3d12_video_processor_create(struct pipe_context *context, const struct pipe_vid
|
|||
|
||||
debug_printf("[d3d12_video_processor] d3d12_video_create_processor - Created successfully!\n");
|
||||
|
||||
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
|
||||
|
||||
if (pD3D12Ctx->priority_manager)
|
||||
{
|
||||
// Register queue with priority manager
|
||||
|
|
@@ -501,8 +495,6 @@ d3d12_video_processor_create(struct pipe_context *context, const struct pipe_vid
}
}

#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )

return &pD3D12Proc->base;

failed:
@@ -682,10 +674,8 @@ d3d12_video_processor_create_command_objects(struct d3d12_video_processor *pD3D1
assert(pD3D12Proc->m_spD3D12VideoDevice);

D3D12_COMMAND_QUEUE_DESC commandQueueDesc = { D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS };
#if ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
if (pD3D12Proc->m_pD3D12Screen->supports_dynamic_queue_priority)
commandQueueDesc.Flags |= D3D12_COMMAND_QUEUE_FLAG_ALLOW_DYNAMIC_PRIORITY;
#endif // ( USE_D3D12_PREVIEW_HEADERS && ( D3D12_PREVIEW_SDK_VERSION >= 717 ) )
HRESULT hr = pD3D12Proc->m_pD3D12Screen->dev->CreateCommandQueue(
&commandQueueDesc,
IID_PPV_ARGS(pD3D12Proc->m_spCommandQueue.GetAddressOf()));

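A minimal sketch of the queue-creation pattern in the hunk above, assuming an ID3D12Device pointer, a capability flag equivalent to supports_dynamic_queue_priority, and headers that define D3D12_COMMAND_QUEUE_FLAG_ALLOW_DYNAMIC_PRIORITY; the helper is illustrative, not the driver's actual code:

   // Opt the video process queue into dynamic priority only when the device reports support.
   static HRESULT create_video_process_queue_example(ID3D12Device *device,
                                                     bool supports_dynamic_queue_priority,
                                                     ID3D12CommandQueue **out_queue)
   {
      D3D12_COMMAND_QUEUE_DESC desc = { D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS };
      if (supports_dynamic_queue_priority)
         desc.Flags |= D3D12_COMMAND_QUEUE_FLAG_ALLOW_DYNAMIC_PRIORITY; // allows retuning priority after creation
      return device->CreateCommandQueue(&desc, IID_PPV_ARGS(out_queue));
   }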
@@ -593,7 +593,7 @@ d3d12_video_encode_supported_slice_structures(const D3D12_VIDEO_ENCODER_CODEC &c
/*
All these structures must be present in memory (stack scope) when calling
CheckFeatureSupport and for any subsequent read from d3d12_video_encode_support_caps
capEncoderSupportData1 in/out parameter
capEncoderSupportData in/out parameter
*/
struct d3d12_encode_support_cap_allocations
{
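To illustrate the stack-scope requirement stated in the comment above: the support query only stores pointers into caller-provided allocations, so the allocation struct and the support struct must live in the same scope for the query and every later read. A simplified sketch under that assumption (several fields a real query needs are omitted, and the helper is hypothetical):

   // Backing storage and the support struct share one stack scope.
   bool query_encoder_support_example(ID3D12VideoDevice3 *video_device)
   {
      struct d3d12_encode_support_cap_allocations cap_allocations = {};
      D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 support = {};
      support.RateControl.ConfigParams.pConfiguration_CQP = &cap_allocations.rcCqp; // pointer into stack storage
      support.RateControl.ConfigParams.DataSize = sizeof(cap_allocations.rcCqp);
      HRESULT hr = video_device->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1,
                                                     &support, sizeof(support));
      // support (and anything it points at) is only valid while cap_allocations is alive here.
      return SUCCEEDED(hr) &&
             (support.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_GENERAL_SUPPORT_OK) != 0;
   }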
@@ -619,42 +619,37 @@ d3d12_video_encode_support_caps(const D3D12_VIDEO_ENCODER_CODEC &argTargetCodec,
DXGI_FORMAT encodeFormat,
ID3D12VideoDevice3 *pD3D12VideoDevice,
D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_SUPPORT codecSupport,
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 &capEncoderSupportData1,
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 &capEncoderSupportData,
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLUTION_SUPPORT_LIMITS1 &resolutionDepCaps,
#else
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 &capEncoderSupportData1,
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLUTION_SUPPORT_LIMITS &resolutionDepCaps,
#endif
uint32_t &maxQualityLevels,
struct d3d12_encode_support_cap_allocations &cap_allocations,
union pipe_enc_cap_spatial_adaptive_quantization &saqSupport)
{
capEncoderSupportData1.NodeIndex = 0;
capEncoderSupportData1.Codec = argTargetCodec;
capEncoderSupportData1.InputFormat = encodeFormat;
capEncoderSupportData1.RateControl = {};
capEncoderSupportData1.RateControl.Mode = D3D12_VIDEO_ENCODER_RATE_CONTROL_MODE_CQP;
capEncoderSupportData1.RateControl.TargetFrameRate.Numerator = 60;
capEncoderSupportData1.RateControl.TargetFrameRate.Denominator = 1;
capEncoderSupportData1.RateControl.ConfigParams.pConfiguration_CQP = &cap_allocations.rcCqp;
capEncoderSupportData1.RateControl.ConfigParams.DataSize = sizeof(cap_allocations.rcCqp);
capEncoderSupportData1.IntraRefresh = D3D12_VIDEO_ENCODER_INTRA_REFRESH_MODE_NONE;
capEncoderSupportData1.ResolutionsListCount = 1;
capEncoderSupportData1.pResolutionList = &maxResolution;
capEncoderSupportData1.MaxReferenceFramesInDPB = 1;
capEncoderSupportData.NodeIndex = 0;
capEncoderSupportData.Codec = argTargetCodec;
capEncoderSupportData.InputFormat = encodeFormat;
capEncoderSupportData.RateControl = {};
capEncoderSupportData.RateControl.Mode = D3D12_VIDEO_ENCODER_RATE_CONTROL_MODE_CQP;
capEncoderSupportData.RateControl.TargetFrameRate.Numerator = 60;
capEncoderSupportData.RateControl.TargetFrameRate.Denominator = 1;
capEncoderSupportData.RateControl.ConfigParams.pConfiguration_CQP = &cap_allocations.rcCqp;
capEncoderSupportData.RateControl.ConfigParams.DataSize = sizeof(cap_allocations.rcCqp);
capEncoderSupportData.IntraRefresh = D3D12_VIDEO_ENCODER_INTRA_REFRESH_MODE_NONE;
capEncoderSupportData.ResolutionsListCount = 1;
capEncoderSupportData.pResolutionList = &maxResolution;
capEncoderSupportData.MaxReferenceFramesInDPB = 1;
switch (argTargetCodec) {
case D3D12_VIDEO_ENCODER_CODEC_H264:
{
// assert(codecSupport.pH264Support); // Fill this in caller if ever used
capEncoderSupportData1.SuggestedProfile.pH264Profile = &cap_allocations.h264prof;
capEncoderSupportData1.SuggestedProfile.DataSize = sizeof(cap_allocations.h264prof);
capEncoderSupportData1.SuggestedLevel.pH264LevelSetting = &cap_allocations.h264lvl;
capEncoderSupportData1.SuggestedLevel.DataSize = sizeof(cap_allocations.h264lvl);
capEncoderSupportData1.CodecGopSequence.pH264GroupOfPictures = &cap_allocations.h264Gop;
capEncoderSupportData1.CodecGopSequence.DataSize = sizeof(cap_allocations.h264Gop);
capEncoderSupportData1.CodecConfiguration.DataSize = sizeof(cap_allocations.h264Config);
capEncoderSupportData1.CodecConfiguration.pH264Config = &cap_allocations.h264Config;
capEncoderSupportData.SuggestedProfile.pH264Profile = &cap_allocations.h264prof;
capEncoderSupportData.SuggestedProfile.DataSize = sizeof(cap_allocations.h264prof);
capEncoderSupportData.SuggestedLevel.pH264LevelSetting = &cap_allocations.h264lvl;
capEncoderSupportData.SuggestedLevel.DataSize = sizeof(cap_allocations.h264lvl);
capEncoderSupportData.CodecGopSequence.pH264GroupOfPictures = &cap_allocations.h264Gop;
capEncoderSupportData.CodecGopSequence.DataSize = sizeof(cap_allocations.h264Gop);
capEncoderSupportData.CodecConfiguration.DataSize = sizeof(cap_allocations.h264Config);
capEncoderSupportData.CodecConfiguration.pH264Config = &cap_allocations.h264Config;
} break;

case D3D12_VIDEO_ENCODER_CODEC_HEVC:
@@ -674,24 +669,24 @@ d3d12_video_encode_support_caps(const D3D12_VIDEO_ENCODER_CODEC &argTargetCodec,
if ((codecSupport.pHEVCSupport->SupportFlags & D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_SUPPORT_HEVC_FLAG_ASYMETRIC_MOTION_PARTITION_REQUIRED) != 0)
cap_allocations.hevcConfig.ConfigurationFlags |= D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION_HEVC_FLAG_USE_ASYMETRIC_MOTION_PARTITION;

capEncoderSupportData1.SuggestedProfile.pHEVCProfile = &cap_allocations.hevcprof;
capEncoderSupportData1.SuggestedProfile.DataSize = sizeof(cap_allocations.hevcprof);
capEncoderSupportData1.SuggestedLevel.pHEVCLevelSetting = &cap_allocations.hevcLvl;
capEncoderSupportData1.SuggestedLevel.DataSize = sizeof(cap_allocations.hevcLvl);
capEncoderSupportData1.CodecGopSequence.pHEVCGroupOfPictures = &cap_allocations.hevcGop;
capEncoderSupportData1.CodecGopSequence.DataSize = sizeof(cap_allocations.hevcGop);
capEncoderSupportData1.CodecConfiguration.DataSize = sizeof(cap_allocations.hevcConfig);
capEncoderSupportData1.CodecConfiguration.pHEVCConfig = &cap_allocations.hevcConfig;
capEncoderSupportData.SuggestedProfile.pHEVCProfile = &cap_allocations.hevcprof;
capEncoderSupportData.SuggestedProfile.DataSize = sizeof(cap_allocations.hevcprof);
capEncoderSupportData.SuggestedLevel.pHEVCLevelSetting = &cap_allocations.hevcLvl;
capEncoderSupportData.SuggestedLevel.DataSize = sizeof(cap_allocations.hevcLvl);
capEncoderSupportData.CodecGopSequence.pHEVCGroupOfPictures = &cap_allocations.hevcGop;
capEncoderSupportData.CodecGopSequence.DataSize = sizeof(cap_allocations.hevcGop);
capEncoderSupportData.CodecConfiguration.DataSize = sizeof(cap_allocations.hevcConfig);
capEncoderSupportData.CodecConfiguration.pHEVCConfig = &cap_allocations.hevcConfig;
} break;

case D3D12_VIDEO_ENCODER_CODEC_AV1:
{
capEncoderSupportData1.SuggestedProfile.pAV1Profile = &cap_allocations.av1prof;
capEncoderSupportData1.SuggestedProfile.DataSize = sizeof(cap_allocations.av1prof);
capEncoderSupportData1.SuggestedLevel.pAV1LevelSetting = &cap_allocations.av1Lvl;
capEncoderSupportData1.SuggestedLevel.DataSize = sizeof(cap_allocations.av1Lvl);
capEncoderSupportData1.CodecGopSequence.pAV1SequenceStructure = &cap_allocations.av1Gop;
capEncoderSupportData1.CodecGopSequence.DataSize = sizeof(cap_allocations.av1Gop);
capEncoderSupportData.SuggestedProfile.pAV1Profile = &cap_allocations.av1prof;
capEncoderSupportData.SuggestedProfile.DataSize = sizeof(cap_allocations.av1prof);
capEncoderSupportData.SuggestedLevel.pAV1LevelSetting = &cap_allocations.av1Lvl;
capEncoderSupportData.SuggestedLevel.DataSize = sizeof(cap_allocations.av1Lvl);
capEncoderSupportData.CodecGopSequence.pAV1SequenceStructure = &cap_allocations.av1Gop;
capEncoderSupportData.CodecGopSequence.DataSize = sizeof(cap_allocations.av1Gop);
D3D12_FEATURE_DATA_VIDEO_ENCODER_CODEC_CONFIGURATION_SUPPORT capCodecConfigData = { };
capCodecConfigData.NodeIndex = 0;
capCodecConfigData.Codec = D3D12_VIDEO_ENCODER_CODEC_AV1;
@@ -710,8 +705,8 @@ d3d12_video_encode_support_caps(const D3D12_VIDEO_ENCODER_CODEC &argTargetCodec,
}
cap_allocations.av1Config.OrderHintBitsMinus1 = 7;
cap_allocations.av1Config.FeatureFlags = av1CodecSupport.RequiredFeatureFlags;
capEncoderSupportData1.CodecConfiguration.DataSize = sizeof(cap_allocations.av1Config);
capEncoderSupportData1.CodecConfiguration.pAV1Config = &cap_allocations.av1Config;
capEncoderSupportData.CodecConfiguration.DataSize = sizeof(cap_allocations.av1Config);
capEncoderSupportData.CodecConfiguration.pAV1Config = &cap_allocations.av1Config;
} break;
default:
{
@@ -721,12 +716,11 @@ d3d12_video_encode_support_caps(const D3D12_VIDEO_ENCODER_CODEC &argTargetCodec,

// prepare inout storage for the resolution dependent result.
resolutionDepCaps = {};
capEncoderSupportData1.pResolutionDependentSupport = &resolutionDepCaps;
capEncoderSupportData.pResolutionDependentSupport = &resolutionDepCaps;

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
HRESULT hr = pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT2,
&capEncoderSupportData1,
sizeof(capEncoderSupportData1));
&capEncoderSupportData,
sizeof(capEncoderSupportData));

if (FAILED(hr)) {
debug_printf("CheckFeatureSupport D3D12_FEATURE_VIDEO_ENCODER_SUPPORT2 failed with HR %x\n", (unsigned)hr);
@@ -734,25 +728,19 @@ d3d12_video_encode_support_caps(const D3D12_VIDEO_ENCODER_CODEC &argTargetCodec,

// D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 extends D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1
// in a binary compatible way, so just cast it and try with the older query D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 * casted_down_cap_data = reinterpret_cast<D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1*>(&capEncoderSupportData1);
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 * casted_down_cap_data = reinterpret_cast<D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1*>(&capEncoderSupportData);
hr = pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1,
casted_down_cap_data,
sizeof(D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1));
}

#else
HRESULT hr = pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1,
&capEncoderSupportData1,
sizeof(capEncoderSupportData1));
#endif

if (FAILED(hr)) {
debug_printf("CheckFeatureSupport D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1 failed with HR %x\n", (unsigned)hr);
debug_printf("Falling back to check previous query version D3D12_FEATURE_VIDEO_ENCODER_SUPPORT...\n");

// D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 extends D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT
// in a binary compatible way, so just cast it and try with the older query D3D12_FEATURE_VIDEO_ENCODER_SUPPORT
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT * casted_down_cap_data = reinterpret_cast<D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT*>(&capEncoderSupportData1);
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT * casted_down_cap_data = reinterpret_cast<D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT*>(&capEncoderSupportData);
hr = pD3D12VideoDevice->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT,
casted_down_cap_data,
sizeof(D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT));
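The fallback logic above relies on the newer D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT* structs extending the older ones in a binary-compatible way, so the same storage can be handed to an older query by casting down. A condensed sketch of that chain, assuming headers that define the SUPPORT2 types (the helper name is illustrative, not the driver's):

   static HRESULT check_encoder_support_with_fallback(ID3D12VideoDevice3 *video_device,
                                                      D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 &data)
   {
      HRESULT hr = video_device->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT2,
                                                     &data, sizeof(data));
      if (FAILED(hr)) {
         // Retry the previous query revision over the same (larger) storage.
         auto *data1 = reinterpret_cast<D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 *>(&data);
         hr = video_device->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT1,
                                                data1, sizeof(D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1));
      }
      if (FAILED(hr)) {
         // Last resort: the original query revision.
         auto *data0 = reinterpret_cast<D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT *>(&data);
         hr = video_device->CheckFeatureSupport(D3D12_FEATURE_VIDEO_ENCODER_SUPPORT,
                                                data0, sizeof(D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT));
      }
      return hr;
   }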
@@ -771,18 +759,18 @@ d3d12_video_encode_support_caps(const D3D12_VIDEO_ENCODER_CODEC &argTargetCodec,
// A lower value means higher quality, and a value of 1 represents the highest quality.
// The quality level setting is used as a trade-off between quality and speed/power
// consumption, with higher quality corresponds to lower speed and higher power consumption.
maxQualityLevels = capEncoderSupportData1.MaxQualityVsSpeed + 1; // VA range starts from 1, D3D12 starts from 0
maxQualityLevels = capEncoderSupportData.MaxQualityVsSpeed + 1; // VA range starts from 1, D3D12 starts from 0

saqSupport.bits.max_spatial_adaptive_quantization_strength = 0u;

if ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_SPATIAL_ADAPTIVE_QP_AVAILABLE) != 0)
if ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_SPATIAL_ADAPTIVE_QP_AVAILABLE) != 0)
{
saqSupport.bits.max_spatial_adaptive_quantization_strength = 1u;
}

bool configSupported =
(((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_GENERAL_SUPPORT_OK) != 0) &&
(capEncoderSupportData1.ValidationFlags == D3D12_VIDEO_ENCODER_VALIDATION_FLAG_NONE));
(((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_GENERAL_SUPPORT_OK) != 0) &&
(capEncoderSupportData.ValidationFlags == D3D12_VIDEO_ENCODER_VALIDATION_FLAG_NONE));

return configSupported;
}
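The "+ 1" above implements the range conversion described in the comment: D3D12 reports MaxQualityVsSpeed as a 0-based maximum, while the quality-level count exposed to the VA-style frontend is 1-based. A tiny sketch of the index shift (helper names are illustrative, and the ordering semantics of individual levels are left to the comment above):

   // VA quality levels span 1..maxQualityLevels; D3D12 QualityVsSpeed values span 0..MaxQualityVsSpeed.
   static inline uint32_t va_quality_level_to_d3d12(uint32_t va_level)
   {
      return va_level - 1;
   }
   static inline uint32_t d3d12_quality_vs_speed_to_va(uint32_t d3d12_value)
   {
      return d3d12_value + 1;
   }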
@@ -987,8 +975,6 @@ d3d12_has_video_process_support(struct pipe_screen *pscreen,
return VideoFeatureAreaSupport.VideoProcessSupport && bSupportsAny;
}

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

static
union pipe_enc_cap_two_pass
query_two_pass_support(struct pipe_screen *pscreen,
@@ -1467,8 +1453,6 @@ get_qpmap_gpuinput_support(D3D12_VIDEO_ENCODER_INPUT_MAP_SESSION_INFO sessionInf
return qpmap_gpu_support;
}

#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

static bool
d3d12_has_video_encode_support(struct pipe_screen *pscreen,
enum pipe_video_profile profile,
@@ -1561,38 +1545,33 @@ d3d12_has_video_encode_support(struct pipe_screen *pscreen,
profile,
level,
spD3D12VideoDevice.Get());
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData1 = {};
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData = {};
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLUTION_SUPPORT_LIMITS1 resolutionDepCaps;
#else
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 capEncoderSupportData1 = {};
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLUTION_SUPPORT_LIMITS resolutionDepCaps;
#endif
capEncoderSupportData1.SubregionFrameEncoding = (supportedSliceStructures == PIPE_VIDEO_CAP_SLICE_STRUCTURE_NONE) ?
capEncoderSupportData.SubregionFrameEncoding = (supportedSliceStructures == PIPE_VIDEO_CAP_SLICE_STRUCTURE_NONE) ?
D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_FULL_FRAME :
D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_UNIFORM_PARTITIONING_SUBREGIONS_PER_FRAME;
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_SUBREGIONS_LAYOUT_DATA_SLICES sliceData = { };
capEncoderSupportData1.SubregionFrameEncodingData.DataSize = sizeof(sliceData);
capEncoderSupportData1.SubregionFrameEncodingData.pSlicesPartition_H264 = &sliceData;
capEncoderSupportData.SubregionFrameEncodingData.DataSize = sizeof(sliceData);
capEncoderSupportData.SubregionFrameEncodingData.pSlicesPartition_H264 = &sliceData;
d3d12_encode_support_cap_allocations cap_allocations = {};
supportsProfile = supportsProfile && d3d12_video_encode_support_caps(codecDesc,
maxRes,
encodeFormat,
spD3D12VideoDevice.Get(),
d3d12_codec_support,
capEncoderSupportData1,
capEncoderSupportData,
resolutionDepCaps,
maxQualityLevels,
cap_allocations,
saqSupport);
bVideoEncodeRequiresTextureArray = (capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RECONSTRUCTED_FRAMES_REQUIRE_TEXTURE_ARRAYS) != 0;
bVideoEncodeRequiresTextureArray = (capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RECONSTRUCTED_FRAMES_REQUIRE_TEXTURE_ARRAYS) != 0;
if (supportedSliceStructures == PIPE_VIDEO_CAP_SLICE_STRUCTURE_NONE)
maxSlices = 0;
else
maxSlices = resolutionDepCaps.MaxSubregionsNumber;

maxIRDuration = resolutionDepCaps.MaxIntraRefreshFrameDuration;
isRCMaxFrameSizeSupported = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_MAX_FRAME_SIZE_AVAILABLE) != 0) ? 1 : 0;
isRCMaxFrameSizeSupported = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_MAX_FRAME_SIZE_AVAILABLE) != 0) ? 1 : 0;
maxReferencesPerFrame =
d3d12_video_encode_supported_references_per_frame_structures(codecDesc,
profile,
@@ -1602,47 +1581,44 @@ d3d12_has_video_encode_support(struct pipe_screen *pscreen,
maxDPBCapacity);

memset(&roi_support, 0, sizeof(roi_support));
roi_support.bits.roi_rc_qp_delta_support = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_DELTA_QP_AVAILABLE) != 0) ? 1 : 0;
roi_support.bits.roi_rc_qp_delta_support = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_DELTA_QP_AVAILABLE) != 0) ? 1 : 0;
roi_support.bits.num_roi_regions = roi_support.bits.roi_rc_qp_delta_support ? PIPE_ENC_ROI_REGION_NUM_MAX : 0;
roi_support.bits.log2_roi_min_block_pixel_size = static_cast<uint32_t>(std::log2(capEncoderSupportData1.pResolutionDependentSupport[0].QPMapRegionPixelsSize));
roi_support.bits.log2_roi_min_block_pixel_size = static_cast<uint32_t>(std::log2(capEncoderSupportData.pResolutionDependentSupport[0].QPMapRegionPixelsSize));

supportsProfile = d3d12_video_encode_get_h264_codec_support(profDesc,
spD3D12VideoDevice.Get(),
codecSupport.h264_support.d3d12_caps);

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE

D3D12_VIDEO_ENCODER_INPUT_MAP_SESSION_INFO sessionInfo =
{
// D3D12_VIDEO_ENCODER_CODEC Codec;
capEncoderSupportData1.Codec,
capEncoderSupportData.Codec,
// D3D12_VIDEO_ENCODER_PROFILE_DESC Profile;
profDesc,
// D3D12_VIDEO_ENCODER_LEVEL_SETTING Level;
maxLvl,
// DXGI_FORMAT InputFormat;
capEncoderSupportData1.InputFormat,
capEncoderSupportData.InputFormat,
// D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_DESC InputResolution;
maxRes,
// D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION CodecConfiguration;
capEncoderSupportData1.CodecConfiguration,
capEncoderSupportData.CodecConfiguration,
// D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE SubregionFrameEncoding;
capEncoderSupportData1.SubregionFrameEncoding,
capEncoderSupportData.SubregionFrameEncoding,
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_SUBREGIONS_LAYOUT_DATA SubregionFrameEncodingData;
capEncoderSupportData1.SubregionFrameEncodingData,
capEncoderSupportData.SubregionFrameEncodingData,
};

dirty_rects_support = get_dirty_rects_support(sessionInfo, spD3D12VideoDevice.Get(), D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_CPU_BUFFER);
dirty_rects_support_gpu = get_dirty_rects_support(sessionInfo, spD3D12VideoDevice.Get(), D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE);
move_rects_support = get_move_rects_support(sessionInfo, spD3D12VideoDevice.Get());
get_gpu_output_stats_support(sessionInfo, capEncoderSupportData1.SupportFlags, spD3D12VideoDevice.Get(), gpu_stats_qp, gpu_stats_satd, gpu_stats_rcbits, psnr_support);
sliced_encode_support = get_sliced_encode_support(capEncoderSupportData1.SupportFlags);
get_gpu_output_stats_support(sessionInfo, capEncoderSupportData.SupportFlags, spD3D12VideoDevice.Get(), gpu_stats_qp, gpu_stats_satd, gpu_stats_rcbits, psnr_support);
sliced_encode_support = get_sliced_encode_support(capEncoderSupportData.SupportFlags);
qpmap_support = get_qpmap_gpuinput_support(sessionInfo, spD3D12VideoDevice.Get());
gpu_motion_input_support = get_motion_gpuinput_support(sessionInfo, spD3D12VideoDevice.Get());
two_pass_support = query_two_pass_support(pscreen, sessionInfo, spD3D12VideoDevice.Get(),
((capEncoderSupportData1.SupportFlags &
((capEncoderSupportData.SupportFlags &
D3D12_VIDEO_ENCODER_SUPPORT_FLAG_READABLE_RECONSTRUCTED_PICTURE_LAYOUT_AVAILABLE) != 0));
#endif
}
} break;
#endif
@@ -1659,14 +1635,10 @@ d3d12_has_video_encode_support(struct pipe_screen *pscreen,
if ((profile != PIPE_VIDEO_PROFILE_HEVC_MAIN) &&
(profile != PIPE_VIDEO_PROFILE_HEVC_MAIN_10))
{
#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
// Video encode support in underlying d3d12 device needs ID3D12VideoDevice4
// for this HEVC 422/444 d3d12 gallium driver implementation
ComPtr<ID3D12VideoDevice4> spD3D12VideoDevice4;
bRuntimeSupportsProfile = SUCCEEDED(spD3D12VideoDevice->QueryInterface(IID_PPV_ARGS(spD3D12VideoDevice4.GetAddressOf())));
#else
bRuntimeSupportsProfile = false;
#endif // D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
}

D3D12_VIDEO_ENCODER_PROFILE_DESC profDesc = {};
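The runtime check in the hunk above is a QueryInterface probe: HEVC 4:2:2/4:4:4 encode support is gated on the runtime exposing ID3D12VideoDevice4. A minimal sketch of the same probe pattern (the helper name is illustrative):

   static bool has_video_device4(ID3D12VideoDevice3 *video_device)
   {
      ComPtr<ID3D12VideoDevice4> video_device4;
      // Succeeds only when the runtime implements the newer video device interface.
      return SUCCEEDED(video_device->QueryInterface(IID_PPV_ARGS(video_device4.GetAddressOf())));
   }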
@@ -1903,76 +1875,68 @@ d3d12_has_video_encode_support(struct pipe_screen *pscreen,
supportsProfile = supportsProfile &&
d3d12_video_encode_supported_resolution_range(codecDesc, minRes, maxRes, alignRes, spD3D12VideoDevice.Get());

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData1 = {};
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData = {};
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLUTION_SUPPORT_LIMITS1 resolutionDepCaps;
#else
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 capEncoderSupportData1 = {};
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLUTION_SUPPORT_LIMITS resolutionDepCaps;
#endif
capEncoderSupportData1.SubregionFrameEncoding = (supportedSliceStructures == PIPE_VIDEO_CAP_SLICE_STRUCTURE_NONE) ?
capEncoderSupportData.SubregionFrameEncoding = (supportedSliceStructures == PIPE_VIDEO_CAP_SLICE_STRUCTURE_NONE) ?
D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_FULL_FRAME :
D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_UNIFORM_PARTITIONING_SUBREGIONS_PER_FRAME;
D3D12_VIDEO_ENCODER_PICTURE_CONTROL_SUBREGIONS_LAYOUT_DATA_SLICES sliceData = { };
capEncoderSupportData1.SubregionFrameEncodingData.DataSize = sizeof(sliceData);
capEncoderSupportData1.SubregionFrameEncodingData.pSlicesPartition_HEVC = &sliceData;
capEncoderSupportData.SubregionFrameEncodingData.DataSize = sizeof(sliceData);
capEncoderSupportData.SubregionFrameEncodingData.pSlicesPartition_HEVC = &sliceData;
d3d12_encode_support_cap_allocations cap_allocations = {};
supportsProfile = supportsProfile && d3d12_video_encode_support_caps(codecDesc,
maxRes,
encodeFormat,
spD3D12VideoDevice.Get(),
d3d12_codec_support,
capEncoderSupportData1,
capEncoderSupportData,
resolutionDepCaps,
maxQualityLevels,
cap_allocations,
saqSupport);
bVideoEncodeRequiresTextureArray = (capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RECONSTRUCTED_FRAMES_REQUIRE_TEXTURE_ARRAYS) != 0;
bVideoEncodeRequiresTextureArray = (capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RECONSTRUCTED_FRAMES_REQUIRE_TEXTURE_ARRAYS) != 0;
if (supportedSliceStructures == PIPE_VIDEO_CAP_SLICE_STRUCTURE_NONE)
maxSlices = 0;
else
maxSlices = resolutionDepCaps.MaxSubregionsNumber;

maxIRDuration = resolutionDepCaps.MaxIntraRefreshFrameDuration;
isRCMaxFrameSizeSupported = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_MAX_FRAME_SIZE_AVAILABLE) != 0) ? 1 : 0;
isRCMaxFrameSizeSupported = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_MAX_FRAME_SIZE_AVAILABLE) != 0) ? 1 : 0;

memset(&roi_support, 0, sizeof(roi_support));
roi_support.bits.roi_rc_qp_delta_support = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_DELTA_QP_AVAILABLE) != 0) ? 1 : 0;
roi_support.bits.roi_rc_qp_delta_support = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_DELTA_QP_AVAILABLE) != 0) ? 1 : 0;
roi_support.bits.num_roi_regions = roi_support.bits.roi_rc_qp_delta_support ? PIPE_ENC_ROI_REGION_NUM_MAX : 0;
roi_support.bits.log2_roi_min_block_pixel_size = static_cast<uint32_t>(std::log2(capEncoderSupportData1.pResolutionDependentSupport[0].QPMapRegionPixelsSize));

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
roi_support.bits.log2_roi_min_block_pixel_size = static_cast<uint32_t>(std::log2(capEncoderSupportData.pResolutionDependentSupport[0].QPMapRegionPixelsSize));

D3D12_VIDEO_ENCODER_INPUT_MAP_SESSION_INFO sessionInfo =
{
// D3D12_VIDEO_ENCODER_CODEC Codec;
capEncoderSupportData1.Codec,
capEncoderSupportData.Codec,
// D3D12_VIDEO_ENCODER_PROFILE_DESC Profile;
profDesc,
// D3D12_VIDEO_ENCODER_LEVEL_SETTING Level;
maxLvl,
// DXGI_FORMAT InputFormat;
capEncoderSupportData1.InputFormat,
capEncoderSupportData.InputFormat,
// D3D12_VIDEO_ENCODER_PICTURE_RESOLUTION_DESC InputResolution;
capEncoderSupportData1.pResolutionList[0],
capEncoderSupportData.pResolutionList[0],
// D3D12_VIDEO_ENCODER_CODEC_CONFIGURATION CodecConfiguration;
capEncoderSupportData1.CodecConfiguration,
capEncoderSupportData.CodecConfiguration,
// D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE SubregionFrameEncoding;
capEncoderSupportData1.SubregionFrameEncoding,
capEncoderSupportData.SubregionFrameEncoding,
// D3D12_VIDEO_ENCODER_PICTURE_CONTROL_SUBREGIONS_LAYOUT_DATA SubregionFrameEncodingData;
capEncoderSupportData1.SubregionFrameEncodingData,
capEncoderSupportData.SubregionFrameEncodingData,
};

dirty_rects_support = get_dirty_rects_support(sessionInfo, spD3D12VideoDevice.Get(), D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_CPU_BUFFER);
dirty_rects_support_gpu = get_dirty_rects_support(sessionInfo, spD3D12VideoDevice.Get(), D3D12_VIDEO_ENCODER_INPUT_MAP_SOURCE_GPU_TEXTURE);
move_rects_support = get_move_rects_support(sessionInfo, spD3D12VideoDevice.Get());
get_gpu_output_stats_support(sessionInfo, capEncoderSupportData1.SupportFlags, spD3D12VideoDevice.Get(), gpu_stats_qp, gpu_stats_satd, gpu_stats_rcbits, psnr_support);
sliced_encode_support = get_sliced_encode_support(capEncoderSupportData1.SupportFlags);
get_gpu_output_stats_support(sessionInfo, capEncoderSupportData.SupportFlags, spD3D12VideoDevice.Get(), gpu_stats_qp, gpu_stats_satd, gpu_stats_rcbits, psnr_support);
sliced_encode_support = get_sliced_encode_support(capEncoderSupportData.SupportFlags);
gpu_motion_input_support = get_motion_gpuinput_support(sessionInfo, spD3D12VideoDevice.Get());
two_pass_support = query_two_pass_support(pscreen, sessionInfo, spD3D12VideoDevice.Get(),
((capEncoderSupportData1.SupportFlags &
((capEncoderSupportData.SupportFlags &
D3D12_VIDEO_ENCODER_SUPPORT_FLAG_READABLE_RECONSTRUCTED_PICTURE_LAYOUT_AVAILABLE) != 0));
#endif
}
}
} break;
@@ -2208,31 +2172,26 @@ d3d12_has_video_encode_support(struct pipe_screen *pscreen,

DXGI_FORMAT encodeFormat = d3d12_convert_pipe_video_profile_to_dxgi_format(profile);

#if D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData1 = {};
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT2 capEncoderSupportData = {};
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLUTION_SUPPORT_LIMITS1 resolutionDepCaps;
#else
D3D12_FEATURE_DATA_VIDEO_ENCODER_SUPPORT1 capEncoderSupportData1 = {};
D3D12_FEATURE_DATA_VIDEO_ENCODER_RESOLUTION_SUPPORT_LIMITS resolutionDepCaps;
#endif
capEncoderSupportData1.SubregionFrameEncoding = (supportedSliceStructures == PIPE_VIDEO_CAP_SLICE_STRUCTURE_NONE) ?
capEncoderSupportData.SubregionFrameEncoding = (supportedSliceStructures == PIPE_VIDEO_CAP_SLICE_STRUCTURE_NONE) ?
D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_FULL_FRAME :
D3D12_VIDEO_ENCODER_FRAME_SUBREGION_LAYOUT_MODE_UNIFORM_GRID_PARTITION;

capEncoderSupportData1.SubregionFrameEncodingData.DataSize = sizeof(av1TileSupport.TilesConfiguration);
capEncoderSupportData1.SubregionFrameEncodingData.pTilesPartition_AV1 = &av1TileSupport.TilesConfiguration;
capEncoderSupportData.SubregionFrameEncodingData.DataSize = sizeof(av1TileSupport.TilesConfiguration);
capEncoderSupportData.SubregionFrameEncodingData.pTilesPartition_AV1 = &av1TileSupport.TilesConfiguration;
d3d12_encode_support_cap_allocations cap_allocations = {};
supportsProfile = supportsProfile && d3d12_video_encode_support_caps(codecDesc,
maxRes,
encodeFormat,
spD3D12VideoDevice.Get(),
d3d12_codec_support,
capEncoderSupportData1,
capEncoderSupportData,
resolutionDepCaps,
maxQualityLevels,
cap_allocations,
saqSupport);
bVideoEncodeRequiresTextureArray = (capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RECONSTRUCTED_FRAMES_REQUIRE_TEXTURE_ARRAYS) != 0;
bVideoEncodeRequiresTextureArray = (capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RECONSTRUCTED_FRAMES_REQUIRE_TEXTURE_ARRAYS) != 0;
if (supportedSliceStructures == PIPE_VIDEO_CAP_SLICE_STRUCTURE_NONE)
maxSlices = 0;
else
@@ -2241,11 +2200,11 @@ d3d12_has_video_encode_support(struct pipe_screen *pscreen,
maxIRDuration = resolutionDepCaps.MaxIntraRefreshFrameDuration;
codecSupport.av1_support.features_ext2.bits.max_tile_num_minus1 = maxSlices - 1;

isRCMaxFrameSizeSupported = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_MAX_FRAME_SIZE_AVAILABLE) != 0) ? 1 : 0;
isRCMaxFrameSizeSupported = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_MAX_FRAME_SIZE_AVAILABLE) != 0) ? 1 : 0;
memset(&roi_support, 0, sizeof(roi_support));
roi_support.bits.roi_rc_qp_delta_support = ((capEncoderSupportData1.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_DELTA_QP_AVAILABLE) != 0) ? 1 : 0;
roi_support.bits.roi_rc_qp_delta_support = ((capEncoderSupportData.SupportFlags & D3D12_VIDEO_ENCODER_SUPPORT_FLAG_RATE_CONTROL_DELTA_QP_AVAILABLE) != 0) ? 1 : 0;
roi_support.bits.num_roi_regions = roi_support.bits.roi_rc_qp_delta_support ? PIPE_ENC_ROI_REGION_NUM_MAX : 0;
roi_support.bits.log2_roi_min_block_pixel_size = static_cast<uint32_t>(std::log2(capEncoderSupportData1.pResolutionDependentSupport[0].QPMapRegionPixelsSize));
roi_support.bits.log2_roi_min_block_pixel_size = static_cast<uint32_t>(std::log2(capEncoderSupportData.pResolutionDependentSupport[0].QPMapRegionPixelsSize));
}
}
} break;

@@ -42,8 +42,6 @@ using Microsoft::WRL::ComPtr;

#define D3D12_VIDEO_ANY_DECODER_ENABLED (VIDEO_CODEC_H264DEC || VIDEO_CODEC_H265DEC || VIDEO_CODEC_AV1DEC || VIDEO_CODEC_VP9DEC)

#define D3D12_VIDEO_USE_NEW_ENCODECMDLIST4_INTERFACE (USE_D3D12_PREVIEW_HEADERS && (D3D12_PREVIEW_SDK_VERSION >= 717))

#if !defined(_WIN32) || defined(_MSC_VER)
inline D3D12_VIDEO_DECODER_HEAP_DESC
GetDesc(ID3D12VideoDecoderHeap *heap)