diff --git a/src/gallium/frontends/mediafoundation/context.h b/src/gallium/frontends/mediafoundation/context.h
index b711aeda250..ec2a9ace907 100644
--- a/src/gallium/frontends/mediafoundation/context.h
+++ b/src/gallium/frontends/mediafoundation/context.h
@@ -137,6 +137,28 @@ typedef class DX12EncodeContext
       return result;
    }
 
+   void SetPipeQPMapBufferInfo( void *pQPMap, const uint32_t QPMapSize )
+   {
+      switch( m_Codec )
+      {
+         case D3D12_VIDEO_ENCODER_CODEC_H264:
+            encoderPicInfo.h264enc.input_qpmap_info.input_qpmap_cpu = static_cast<int8_t *>( pQPMap );
+            encoderPicInfo.h264enc.input_qpmap_info.qp_map_values_count = QPMapSize / sizeof( int8_t );
+            encoderPicInfo.h264enc.input_qpmap_info.input_qp_mode = PIPE_ENC_QPMAP_INPUT_MODE_CPU_BUFFER_8BIT;
+            break;
+         case D3D12_VIDEO_ENCODER_CODEC_HEVC:
+            encoderPicInfo.h265enc.input_qpmap_info.input_qpmap_cpu = static_cast<int8_t *>( pQPMap );
+            encoderPicInfo.h265enc.input_qpmap_info.qp_map_values_count = QPMapSize / sizeof( int8_t );
+            encoderPicInfo.h265enc.input_qpmap_info.input_qp_mode = PIPE_ENC_QPMAP_INPUT_MODE_CPU_BUFFER_8BIT;
+            break;
+         case D3D12_VIDEO_ENCODER_CODEC_AV1:
+            encoderPicInfo.av1enc.input_qpmap_info.input_qpmap_cpu = static_cast<int16_t *>( pQPMap );
+            encoderPicInfo.av1enc.input_qpmap_info.qp_map_values_count = QPMapSize / sizeof( int16_t );
+            encoderPicInfo.av1enc.input_qpmap_info.input_qp_mode = PIPE_ENC_QPMAP_INPUT_MODE_CPU_BUFFER_16BIT;
+            break;
+      }
+   }
+
    UINT32 GetFrameRateDenominator()
    {
       UINT32 result = 0;
diff --git a/src/gallium/frontends/mediafoundation/mftransform.cpp b/src/gallium/frontends/mediafoundation/mftransform.cpp
index 96102f867b4..2259fecda6e 100644
--- a/src/gallium/frontends/mediafoundation/mftransform.cpp
+++ b/src/gallium/frontends/mediafoundation/mftransform.cpp
@@ -2100,6 +2100,38 @@ done:
    return hr;
 }
 
+static HRESULT
+GetQPMapBufferFromSampleLockHeld( IMFSample *pSample, BYTE **ppData, DWORD *pSize, ComPtr<IMFMediaBuffer> &outBuffer )
+{
+   if( !pSample || !ppData || !pSize )
+      return E_POINTER;
+
+   ComPtr<IMFAttributes> attrs;
+   HRESULT hr = pSample->QueryInterface( IID_PPV_ARGS( &attrs ) );
+   if( FAILED( hr ) || !attrs )
+      return hr;
+
+   ComPtr<IUnknown> unk;
+   hr = attrs->GetUnknown( MFSampleExtension_VideoEncodeInputDeltaQPMap, IID_PPV_ARGS( &unk ) );
+   if( FAILED( hr ) || !unk )
+      return hr;
+
+   hr = unk.As( &outBuffer );
+   if( FAILED( hr ) || !outBuffer )
+      return hr;
+
+   BYTE *pData = nullptr;
+   DWORD maxLen = 0, curLen = 0;
+   hr = outBuffer->Lock( &pData, &maxLen, &curLen );
+   if( FAILED( hr ) )
+      return hr;
+
+   *ppData = pData;
+   *pSize = curLen;
+
+   return S_OK;
+}
+
 // IMFTransform::ProcessInput
 // https://learn.microsoft.com/en-us/windows/win32/api/mftransform/nf-mftransform-imftransform-processinput
 HRESULT
@@ -2109,6 +2141,9 @@ CDX12EncHMFT::ProcessInput( DWORD dwInputStreamIndex, IMFSample *pSample, DWORD
    HRESULT hr = S_OK;
    UINT32 unChromaOnly = 0;
    LPDX12EncodeContext pDX12EncodeContext = nullptr;
+   BYTE *qpData = nullptr;
+   DWORD qpSize = 0;
+   ComPtr<IMFMediaBuffer> qpMapBuffer;
    std::lock_guard<std::mutex> lock( m_lock );
 
    CHECKHR_GOTO( IsUnlocked(), done );
@@ -2143,6 +2178,17 @@ CDX12EncHMFT::ProcessInput( DWORD dwInputStreamIndex, IMFSample *pSample, DWORD
    // setup the source buffer
    CHECKHR_HRGOTO( PrepareForEncode( pSample, &pDX12EncodeContext ), MF_E_INVALIDMEDIATYPE, done );
 
+   if( SUCCEEDED( GetQPMapBufferFromSampleLockHeld( pSample, &qpData, &qpSize, qpMapBuffer ) ) && qpMapBuffer )
+   {
+      pDX12EncodeContext->SetPipeQPMapBufferInfo( qpData, qpSize );
+   }
+   else
+   {
+      // Make sure the QP map state is left null/zero if we failed to get it.
+      qpData = nullptr;
+      qpSize = 0;
+      qpMapBuffer = nullptr;
+   }
 
    // Submit work
    {
@@ -2194,6 +2240,11 @@ CDX12EncHMFT::ProcessInput( DWORD dwInputStreamIndex, IMFSample *pSample, DWORD
       m_pPipeVideoCodec->flush( m_pPipeVideoCodec );
       HMFT_ETW_EVENT_STOP( "PipeFlush", this );
    }
+   // Release the QP map buffer after the encode_bitstream call returns; qpMapBuffer is only non-null when Lock succeeded.
+   if( qpMapBuffer )
+   {
+      qpMapBuffer->Unlock();
+   }
    m_EncodingQueue.push( pDX12EncodeContext );
    // Moves the GOP tracker state to the next frame for having next
    // frame data in get_frame_descriptor() for next iteration
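
For reviewers, a minimal sketch of how a client could attach a delta QP map to an encoder input sample so that the ProcessInput path above picks it up. The MFSampleExtension_VideoEncodeInputDeltaQPMap attribute and the 8-bit H.264/HEVC value layout come from this patch; the AttachDeltaQPMap helper name and the one-int8_t-per-block assumption are illustrative only, and the block grid the encoder expects is not defined by this change.

#include <mfapi.h>      // MFCreateMemoryBuffer
#include <mfidl.h>      // IMFSample
#include <wrl/client.h> // Microsoft::WRL::ComPtr
#include <cstdint>
#include <cstring>

// Illustrative helper (not part of this patch): attach an 8-bit delta QP map
// to an encoder input sample. H.264/HEVC consume int8_t values per the patch;
// an AV1 session would supply int16_t values instead.
static HRESULT
AttachDeltaQPMap( IMFSample *pSample, const int8_t *pDeltaQPs, DWORD numValues )
{
   if( !pSample || !pDeltaQPs || numValues == 0 )
      return E_INVALIDARG;

   Microsoft::WRL::ComPtr<IMFMediaBuffer> spBuffer;
   HRESULT hr = MFCreateMemoryBuffer( numValues * sizeof( int8_t ), &spBuffer );
   if( FAILED( hr ) )
      return hr;

   BYTE *pData = nullptr;
   hr = spBuffer->Lock( &pData, nullptr, nullptr );
   if( FAILED( hr ) )
      return hr;
   memcpy( pData, pDeltaQPs, numValues * sizeof( int8_t ) );
   spBuffer->Unlock();
   spBuffer->SetCurrentLength( numValues * sizeof( int8_t ) );

   // ProcessInput reads this attribute via GetUnknown and keeps the buffer
   // locked for the duration of the encode_bitstream call.
   return pSample->SetUnknown( MFSampleExtension_VideoEncodeInputDeltaQPMap, spBuffer.Get() );
}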