teflon: Support ReLU activation for ADD ops

ADD operations can have fused ReLU activations. Add the setting to the
operation state.

Signed-off-by: Rob Herring (Arm) <robh@kernel.org>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/40525>
This commit is contained in:
Rob Herring (Arm) 2026-03-19 17:17:58 -05:00 committed by Marge Bot
parent 3780fb8494
commit 69d1da3518
2 changed files with 13 additions and 1 deletions

View file

@@ -182,9 +182,14 @@ fill_operation(struct teflon_delegate *delegate, TfLiteContext *tf_context, TfLi
operation->pooling.padding_same = params->padding == kTfLitePaddingSame;
break;
}
case kTfLiteBuiltinAdd:
case kTfLiteBuiltinAdd: {
TfLiteAddParams *params = (TfLiteAddParams *)node->builtin_data;
operation->type = PIPE_ML_OPERATION_TYPE_ADD;
operation->add.relu = params->activation == kTfLiteActRelu ||
params->activation == kTfLiteActRelu6;
break;
}
case kTfLiteBuiltinConcatenation: {
TfLiteConcatenationParams *params = node->builtin_data;

View file

@@ -1220,6 +1220,13 @@ struct pipe_ml_operation
bool relu;
} fcon;
struct {
/**
* Whether a ReLU activation should be applied to the output.
*/
bool relu;
} add;
struct {
/**
* Dimension along which the tensors are concatenated.