mirror of
https://gitlab.freedesktop.org/mesa/mesa.git
synced 2026-03-24 00:30:34 +01:00
teflon: Support ReLU activation for ADD ops
ADD operations can have fused ReLU activations. Add the setting to the operation state.

Signed-off-by: Rob Herring (Arm) <robh@kernel.org>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/40525>
This commit is contained in:
parent
3780fb8494
commit
69d1da3518
2 changed files with 13 additions and 1 deletions
|
|
@@ -182,9 +182,14 @@ fill_operation(struct teflon_delegate *delegate, TfLiteContext *tf_context, TfLi
|
|||
operation->pooling.padding_same = params->padding == kTfLitePaddingSame;
|
||||
break;
|
||||
}
|
||||
case kTfLiteBuiltinAdd:
|
||||
case kTfLiteBuiltinAdd: {
|
||||
TfLiteAddParams *params = (TfLiteAddParams *)node->builtin_data;
|
||||
|
||||
operation->type = PIPE_ML_OPERATION_TYPE_ADD;
|
||||
operation->add.relu = params->activation == kTfLiteActRelu ||
|
||||
params->activation == kTfLiteActRelu6;
|
||||
break;
|
||||
}
|
||||
case kTfLiteBuiltinConcatenation: {
|
||||
TfLiteConcatenationParams *params = node->builtin_data;
|
||||
|
||||
|
|
|
|||
|
|
@@ -1220,6 +1220,13 @@ struct pipe_ml_operation
|
|||
bool relu;
|
||||
} fcon;
|
||||
|
||||
struct {
|
||||
/**
|
||||
* Whether a ReLU activation should be applied to the output.
|
||||
*/
|
||||
bool relu;
|
||||
} add;
|
||||
|
||||
struct {
|
||||
/**
|
||||
* Dimension along which the tensors are concatenated.
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue