etnaviv/nn: Enable fused ReLU activation

Handle fused ReLU activation by setting the corresponding
flag in the NN engine configuration.
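
For context, a fused ReLU means the NN engine clamps each convolution
result to zero as it is written out, instead of requiring a separate
activation pass over the output tensor. A minimal sketch of the effective
per-element semantics (apply_activation and everything around it is
illustrative, not driver code):

  #include <stdbool.h>
  #include <stdio.h>

  /* What the hardware effectively computes per output element when the
   * relu flag in the NN engine configuration is set. */
  static float apply_activation(float conv_result, bool relu)
  {
      /* out = max(0, conv_result) when relu is enabled */
      return (relu && conv_result < 0.0f) ? 0.0f : conv_result;
  }

  int main(void)
  {
      const float conv_results[] = { -1.5f, 0.0f, 2.25f };

      for (unsigned i = 0; i < 3; i++)
          printf("%f -> %f\n", conv_results[i],
                 apply_activation(conv_results[i], true));

      return 0;
  }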

Reviewed-by: Tomeu Vizoso <tomeu@tomeuvizoso.net>
Signed-off-by: Philipp Zabel <p.zabel@pengutronix.de>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/31981>
commit 7805a20d7b (parent 01dc82bc63)
Author: Philipp Zabel
Date:   2024-09-18 13:00:02 +02:00
Committed by: Marge Bot

2 changed files with 3 additions and 1 deletion

@@ -69,6 +69,7 @@ struct etna_operation {
    bool pointwise;
    bool pooling_first_pixel;
    bool padding_same;
+   bool relu;
    unsigned stride;

@@ -432,6 +432,7 @@ etna_ml_lower_convolution(struct etna_ml_subgraph *subgraph,
    operation->addition = false;
    operation->depthwise = poperation->conv.depthwise;
    operation->pointwise = poperation->conv.pointwise;
+   operation->relu = poperation->conv.relu;
    operation->pooling_first_pixel = calc_pooling_first_pixel(subgraph, poperation);
    operation->padding_same = poperation->conv.padding_same;
    operation->stride = poperation->conv.stride_x;
@@ -692,7 +693,7 @@ create_nn_config(struct etna_ml_subgraph *subgraph, const struct etna_operation
    map->nn_layer_flush = 0x1;
    map->brick_mode = 0x0;
    map->brick_distance = 0x0;
-   map->relu = 0x0;
+   map->relu = operation->relu;
    map->no_flush = nn_core_version == 8;
    map->rounding_mode = 0x1;
    map->partial_cache_data_unit = 0x0;