vl: add entrypoint to get_video_param

Signed-off-by: Christian König <christian.koenig@amd.com>

parent f2f7064e56
commit a15cbabb8b

24 changed files with 58 additions and 23 deletions
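For context, the sketch below is not part of the commit; it only illustrates how a state tracker would call the extended interface once the entrypoint argument exists. The helper name query_mpeg2_bitstream_decode and the header paths are illustrative assumptions, while the get_video_param signature and the PIPE_VIDEO_* constants are the ones introduced or already used in the hunks that follow.

/* Minimal caller sketch for the extended pipe_screen::get_video_param hook.
 * Header paths are assumed; the hook signature matches the p_screen.h hunk below. */
#include <stdbool.h>
#include "pipe/p_screen.h"       /* struct pipe_screen and its get_video_param hook */
#include "pipe/p_video_enums.h"  /* PIPE_VIDEO_* profile/entrypoint/cap enums (assumed location) */

static bool
query_mpeg2_bitstream_decode(struct pipe_screen *screen,
                             unsigned width, unsigned height)
{
   /* Every query now carries the entrypoint as well as the profile,
    * so a driver can answer differently for bitstream vs. shader decoding. */
   if (!screen->get_video_param(screen, PIPE_VIDEO_PROFILE_MPEG2_MAIN,
                                PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                PIPE_VIDEO_CAP_SUPPORTED))
      return false;

   /* Size limits are likewise reported per profile and entrypoint. */
   return width  <= (unsigned)screen->get_video_param(screen, PIPE_VIDEO_PROFILE_MPEG2_MAIN,
                                                      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                                      PIPE_VIDEO_CAP_MAX_WIDTH) &&
          height <= (unsigned)screen->get_video_param(screen, PIPE_VIDEO_PROFILE_MPEG2_MAIN,
                                                      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                                      PIPE_VIDEO_CAP_MAX_HEIGHT);
}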
@@ -33,7 +33,8 @@
 #include "vl_mpeg12_decoder.h"
 
 bool
-vl_profile_supported(struct pipe_screen *screen, enum pipe_video_profile profile)
+vl_profile_supported(struct pipe_screen *screen, enum pipe_video_profile profile,
+                     enum pipe_video_entrypoint entrypoint)
 {
    assert(screen);
    switch (u_reduce_video_profile(profile)) {
@@ -74,6 +75,7 @@ vl_create_decoder(struct pipe_context *pipe,
    (
       pipe->screen,
       templat->profile,
+      templat->entrypoint,
       PIPE_VIDEO_CAP_NPOT_TEXTURES
    );
 
@@ -35,7 +35,8 @@
  * check if a given profile is supported with shader based decoding
  */
 bool
-vl_profile_supported(struct pipe_screen *screen, enum pipe_video_profile profile);
+vl_profile_supported(struct pipe_screen *screen, enum pipe_video_profile profile,
+                     enum pipe_video_entrypoint entrypoint);
 
 /**
  * get the maximum supported level for the given profile with shader based decoding
@@ -406,6 +406,7 @@ vl_video_buffer_create(struct pipe_context *pipe,
    (
       pipe->screen,
       PIPE_VIDEO_PROFILE_UNKNOWN,
+      PIPE_VIDEO_ENTRYPOINT_UNKNOWN,
       PIPE_VIDEO_CAP_NPOT_TEXTURES
    );
 
@@ -152,11 +152,12 @@ ilo_get_shader_param(struct pipe_screen *screen, unsigned shader,
 static int
 ilo_get_video_param(struct pipe_screen *screen,
                     enum pipe_video_profile profile,
+                    enum pipe_video_entrypoint entrypoint,
                     enum pipe_video_cap param)
 {
    switch (param) {
    case PIPE_VIDEO_CAP_SUPPORTED:
-      return vl_profile_supported(screen, profile);
+      return vl_profile_supported(screen, profile, entrypoint);
    case PIPE_VIDEO_CAP_NPOT_TEXTURES:
       return 1;
    case PIPE_VIDEO_CAP_MAX_WIDTH:
@@ -834,11 +834,12 @@ error:
 static int
 nouveau_screen_get_video_param(struct pipe_screen *pscreen,
                                enum pipe_video_profile profile,
+                               enum pipe_video_entrypoint entrypoint,
                                enum pipe_video_cap param)
 {
    switch (param) {
    case PIPE_VIDEO_CAP_SUPPORTED:
-      return vl_profile_supported(pscreen, profile);
+      return vl_profile_supported(pscreen, profile, entrypoint);
    case PIPE_VIDEO_CAP_NPOT_TEXTURES:
       return 1;
    case PIPE_VIDEO_CAP_MAX_WIDTH:
@@ -353,6 +353,7 @@ nouveau_vp3_load_firmware(struct nouveau_vp3_decoder *dec,
 int
 nouveau_vp3_screen_get_video_param(struct pipe_screen *pscreen,
                                    enum pipe_video_profile profile,
+                                   enum pipe_video_entrypoint entrypoint,
                                    enum pipe_video_cap param)
 {
    int chipset = nouveau_screen(pscreen)->device->chipset;
@@ -220,6 +220,7 @@ nouveau_vp3_vp_caps(struct nouveau_vp3_decoder *dec, union pipe_desc desc,
 int
 nouveau_vp3_screen_get_video_param(struct pipe_screen *pscreen,
                                    enum pipe_video_profile profile,
+                                   enum pipe_video_entrypoint entrypoint,
                                    enum pipe_video_cap param);
 
 boolean
@@ -301,6 +301,7 @@ nv84_video_buffer_create(struct pipe_context *pipe,
 int
 nv84_screen_get_video_param(struct pipe_screen *pscreen,
                             enum pipe_video_profile profile,
+                            enum pipe_video_entrypoint entrypoint,
                             enum pipe_video_cap param);
 
 boolean
@@ -744,6 +744,7 @@ error:
 int
 nv84_screen_get_video_param(struct pipe_screen *pscreen,
                             enum pipe_video_profile profile,
+                            enum pipe_video_entrypoint entrypoint,
                             enum pipe_video_cap param)
 {
    switch (param) {
@@ -341,11 +341,12 @@ static float r300_get_paramf(struct pipe_screen* pscreen,
 
 static int r300_get_video_param(struct pipe_screen *screen,
                                 enum pipe_video_profile profile,
+                                enum pipe_video_entrypoint entrypoint,
                                 enum pipe_video_cap param)
 {
    switch (param) {
    case PIPE_VIDEO_CAP_SUPPORTED:
-      return vl_profile_supported(screen, profile);
+      return vl_profile_supported(screen, profile, entrypoint);
    case PIPE_VIDEO_CAP_NPOT_TEXTURES:
       return 0;
    case PIPE_VIDEO_CAP_MAX_WIDTH:
@@ -777,11 +777,12 @@ static int r600_get_shader_param(struct pipe_screen* pscreen, unsigned shader, e
 
 static int r600_get_video_param(struct pipe_screen *screen,
                                 enum pipe_video_profile profile,
+                                enum pipe_video_entrypoint entrypoint,
                                 enum pipe_video_cap param)
 {
    switch (param) {
    case PIPE_VIDEO_CAP_SUPPORTED:
-      return vl_profile_supported(screen, profile);
+      return vl_profile_supported(screen, profile, entrypoint);
    case PIPE_VIDEO_CAP_NPOT_TEXTURES:
       return 1;
    case PIPE_VIDEO_CAP_MAX_WIDTH:
@@ -898,6 +898,7 @@ struct pipe_video_buffer *r600_video_buffer_create(struct pipe_context *pipe,
 
 int r600_uvd_get_video_param(struct pipe_screen *screen,
                              enum pipe_video_profile profile,
+                             enum pipe_video_entrypoint entrypoint,
                              enum pipe_video_cap param);
 
 /*
@@ -174,6 +174,7 @@ struct pipe_video_codec *r600_uvd_create_decoder(struct pipe_context *context,
 
 int r600_uvd_get_video_param(struct pipe_screen *screen,
                              enum pipe_video_profile profile,
+                             enum pipe_video_entrypoint entrypoint,
                              enum pipe_video_cap param)
 {
    struct r600_screen *rscreen = (struct r600_screen *)screen;
@@ -194,5 +195,5 @@ int r600_uvd_get_video_param(struct pipe_screen *screen,
       }
    }
 
-   return ruvd_get_video_param(screen, profile, param);
+   return ruvd_get_video_param(screen, profile, entrypoint, param);
 }
@@ -1075,6 +1075,7 @@ void ruvd_set_dt_surfaces(struct ruvd_msg *msg, struct radeon_surface *luma,
 
 int ruvd_get_video_param(struct pipe_screen *screen,
                          enum pipe_video_profile profile,
+                         enum pipe_video_entrypoint entrypoint,
                          enum pipe_video_cap param)
 {
    switch (param) {
@@ -363,6 +363,7 @@ void ruvd_set_dt_surfaces(struct ruvd_msg *msg, struct radeon_surface *luma,
 /* returns supported codecs and other parameters */
 int ruvd_get_video_param(struct pipe_screen *screen,
                          enum pipe_video_profile profile,
+                         enum pipe_video_entrypoint entrypoint,
                          enum pipe_video_cap param);
 
 /* the hardware only supports NV12 */
@@ -540,11 +540,12 @@ static int r600_get_shader_param(struct pipe_screen* pscreen, unsigned shader, e
 
 static int r600_get_video_param(struct pipe_screen *screen,
                                 enum pipe_video_profile profile,
+                                enum pipe_video_entrypoint entrypoint,
                                 enum pipe_video_cap param)
 {
    switch (param) {
    case PIPE_VIDEO_CAP_SUPPORTED:
-      return vl_profile_supported(screen, profile);
+      return vl_profile_supported(screen, profile, entrypoint);
    case PIPE_VIDEO_CAP_NPOT_TEXTURES:
       return 1;
    case PIPE_VIDEO_CAP_MAX_WIDTH:
@@ -249,11 +249,12 @@ softpipe_get_paramf(struct pipe_screen *screen, enum pipe_capf param)
 static int
 softpipe_get_video_param(struct pipe_screen *screen,
                          enum pipe_video_profile profile,
+                         enum pipe_video_entrypoint entrypoint,
                          enum pipe_video_cap param)
 {
    switch (param) {
    case PIPE_VIDEO_CAP_SUPPORTED:
-      return vl_profile_supported(screen, profile);
+      return vl_profile_supported(screen, profile, entrypoint);
    case PIPE_VIDEO_CAP_NPOT_TEXTURES:
       return 0;
    case PIPE_VIDEO_CAP_MAX_WIDTH:
@@ -94,6 +94,7 @@ struct pipe_screen {
     */
    int (*get_video_param)( struct pipe_screen *,
                            enum pipe_video_profile profile,
+                           enum pipe_video_entrypoint entrypoint,
                            enum pipe_video_cap param );
 
    /**
@@ -76,6 +76,7 @@ vlVdpDecoderCreate(VdpDevice device,
    (
       screen,
       templat.profile,
+      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
       PIPE_VIDEO_CAP_SUPPORTED
    );
    if (!supported) {
@@ -459,8 +460,10 @@ vlVdpDecoderRender(VdpDecoder decoder,
 
    pipe_mutex_lock(vlsurf->device->mutex);
 
-   buffer_support[0] = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE);
-   buffer_support[1] = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_CAP_SUPPORTS_INTERLACED);
+   buffer_support[0] = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
+                                               PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE);
+   buffer_support[1] = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
+                                               PIPE_VIDEO_CAP_SUPPORTS_INTERLACED);
 
    if (vlsurf->video_buffer == NULL ||
        !screen->is_video_format_supported(screen, vlsurf->video_buffer->buffer_format, dec->profile) ||
@@ -471,10 +474,12 @@ vlVdpDecoderRender(VdpDecoder decoder,
       vlsurf->video_buffer->destroy(vlsurf->video_buffer);
 
       /* set the buffer format to the prefered one */
-      vlsurf->templat.buffer_format = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_CAP_PREFERED_FORMAT);
+      vlsurf->templat.buffer_format = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
+                                                              PIPE_VIDEO_CAP_PREFERED_FORMAT);
 
       /* also set interlacing to decoders preferences */
-      vlsurf->templat.interlaced = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_CAP_PREFERS_INTERLACED);
+      vlsurf->templat.interlaced = screen->get_video_param(screen, dec->profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
+                                                           PIPE_VIDEO_CAP_PREFERS_INTERLACED);
 
       /* and recreate the video buffer */
       vlsurf->video_buffer = dec->context->create_video_buffer(dec->context, &vlsurf->templat);
@@ -132,8 +132,8 @@ vlVdpVideoMixerCreate(VdpDevice device,
       VDPAU_MSG(VDPAU_WARN, "[VDPAU] Max layers > 4 not supported\n", vmixer->max_layers);
       goto no_params;
    }
-   max_width = screen->get_video_param(screen, prof, PIPE_VIDEO_CAP_MAX_WIDTH);
-   max_height = screen->get_video_param(screen, prof, PIPE_VIDEO_CAP_MAX_HEIGHT);
+   max_width = screen->get_video_param(screen, prof, PIPE_VIDEO_ENTRYPOINT_BITSTREAM, PIPE_VIDEO_CAP_MAX_WIDTH);
+   max_height = screen->get_video_param(screen, prof, PIPE_VIDEO_ENTRYPOINT_BITSTREAM, PIPE_VIDEO_CAP_MAX_HEIGHT);
    if (vmixer->video_width < 48 ||
        vmixer->video_width > max_width) {
       VDPAU_MSG(VDPAU_WARN, "[VDPAU] 48 < %u < %u not valid for width\n", vmixer->video_width, max_width);
@@ -178,11 +178,15 @@ vlVdpDecoderQueryCapabilities(VdpDevice device, VdpDecoderProfile profile,
    }
 
    pipe_mutex_lock(dev->mutex);
-   *is_supported = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_CAP_SUPPORTED);
+   *is_supported = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
+                                            PIPE_VIDEO_CAP_SUPPORTED);
    if (*is_supported) {
-      *max_width = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_CAP_MAX_WIDTH);
-      *max_height = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_CAP_MAX_HEIGHT);
-      *max_level = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_CAP_MAX_LEVEL);
+      *max_width = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
+                                            PIPE_VIDEO_CAP_MAX_WIDTH);
+      *max_height = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
+                                             PIPE_VIDEO_CAP_MAX_HEIGHT);
+      *max_level = pscreen->get_video_param(pscreen, p_profile, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
+                                            PIPE_VIDEO_CAP_MAX_LEVEL);
       *max_macroblocks = (*max_width/16)*(*max_height/16);
    } else {
       *max_width = 0;
@@ -512,11 +516,13 @@ vlVdpVideoMixerQueryParameterValueRange(VdpDevice device, VdpVideoMixerParameter
    switch (parameter) {
    case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH:
       *(uint32_t*)min_value = 48;
-      *(uint32_t*)max_value = screen->get_video_param(screen, prof, PIPE_VIDEO_CAP_MAX_WIDTH);
+      *(uint32_t*)max_value = screen->get_video_param(screen, prof, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
+                                                      PIPE_VIDEO_CAP_MAX_WIDTH);
       break;
    case VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT:
       *(uint32_t*)min_value = 48;
-      *(uint32_t*)max_value = screen->get_video_param(screen, prof, PIPE_VIDEO_CAP_MAX_HEIGHT);
+      *(uint32_t*)max_value = screen->get_video_param(screen, prof, PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
+                                                      PIPE_VIDEO_CAP_MAX_HEIGHT);
       break;
 
    case VDP_VIDEO_MIXER_PARAMETER_LAYERS:
@@ -75,6 +75,7 @@ vlVdpVideoSurfaceCreate(VdpDevice device, VdpChromaType chroma_type,
    (
       pipe->screen,
       PIPE_VIDEO_PROFILE_UNKNOWN,
+      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
       PIPE_VIDEO_CAP_PREFERED_FORMAT
    );
    p_surf->templat.chroma_format = ChromaToPipe(chroma_type);
@@ -84,6 +85,7 @@ vlVdpVideoSurfaceCreate(VdpDevice device, VdpChromaType chroma_type,
    (
       pipe->screen,
       PIPE_VIDEO_PROFILE_UNKNOWN,
+      PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
       PIPE_VIDEO_CAP_PREFERS_INTERLACED
    );
    p_surf->video_buffer = pipe->create_video_buffer(pipe, &p_surf->templat);
@@ -227,6 +227,7 @@ Status XvMCCreateSubpicture(Display *dpy, XvMCContext *context, XvMCSubpicture *
    tex_templ.last_level = 0;
    if (pipe->screen->get_video_param(pipe->screen,
                                      PIPE_VIDEO_PROFILE_UNKNOWN,
+                                     PIPE_VIDEO_ENTRYPOINT_UNKNOWN,
                                      PIPE_VIDEO_CAP_NPOT_TEXTURES)) {
       tex_templ.width0 = width;
       tex_templ.height0 = height;
@@ -177,7 +177,8 @@ Status XvMCCreateSurface(Display *dpy, XvMCContext *context, XvMCSurface *surfac
    tmpl.buffer_format = pipe->screen->get_video_param
    (
       pipe->screen,
-      PIPE_VIDEO_PROFILE_MPEG2_MAIN,
+      context_priv->decoder->profile,
+      context_priv->decoder->entrypoint,
       PIPE_VIDEO_CAP_PREFERED_FORMAT
    );
    tmpl.chroma_format = context_priv->decoder->chroma_format;
@@ -186,7 +187,8 @@ Status XvMCCreateSurface(Display *dpy, XvMCContext *context, XvMCSurface *surfac
    tmpl.interlaced = pipe->screen->get_video_param
    (
       pipe->screen,
-      PIPE_VIDEO_PROFILE_MPEG2_MAIN,
+      context_priv->decoder->profile,
+      context_priv->decoder->entrypoint,
       PIPE_VIDEO_CAP_PREFERS_INTERLACED
    );
 