From ad0ea175d56599f48cb98f401b4c7cfa19b3acb7 Mon Sep 17 00:00:00 2001
From: Philipp Zabel
Date: Mon, 16 Sep 2024 17:10:44 +0200
Subject: [PATCH] teflon: Mark dilated convolutions and fused activation as not
supported
Dilation and fused activations are not yet implemented.
Mark them as unsupported for now.
Reviewed-by: Tomeu Vizoso
Signed-off-by: Philipp Zabel
Part-of:
---
src/gallium/frontends/teflon/tfl_device.c | 36 +++++++++++++++++++++--
1 file changed, 34 insertions(+), 2 deletions(-)
diff --git a/src/gallium/frontends/teflon/tfl_device.c b/src/gallium/frontends/teflon/tfl_device.c
index bde022b90d7..b5bffc2fe38 100644
--- a/src/gallium/frontends/teflon/tfl_device.c
+++ b/src/gallium/frontends/teflon/tfl_device.c
@@ -115,6 +115,11 @@ fill_operation(struct teflon_delegate *delegate, TfLiteContext *tf_context, TfLi
if (node_registration->builtin_code == kTfLiteBuiltinConv2d) {
TfLiteConvParams* params = (TfLiteConvParams*)node->builtin_data;
+ assert(params->activation == kTfLiteActNone);
+ if (node_registration->version >= 2) {
+ assert(params->dilation_width_factor == 1);
+ assert(params->dilation_height_factor == 1);
+ }
operation->conv.stride_x = params->stride_width;
operation->conv.stride_y = params->stride_height;
operation->conv.padding_same = params->padding == kTfLitePaddingSame;
@@ -122,6 +127,11 @@ fill_operation(struct teflon_delegate *delegate, TfLiteContext *tf_context, TfLi
} else {
TfLiteDepthwiseConvParams* params = (TfLiteDepthwiseConvParams*)node->builtin_data;
+ assert(params->activation == kTfLiteActNone);
+ if (node_registration->version >= 2) {
+ assert(params->dilation_width_factor == 1);
+ assert(params->dilation_height_factor == 1);
+ }
operation->conv.stride_x = params->stride_width;
operation->conv.stride_y = params->stride_height;
operation->conv.padding_same = params->padding == kTfLitePaddingSame;
@@ -377,8 +387,30 @@ PrepareDelegate(TfLiteContext *context, TfLiteDelegate *delegate)
context, node_index, &node, &registration));
switch(registration->builtin_code) {
- case kTfLiteBuiltinConv2d:
- case kTfLiteBuiltinDepthwiseConv2d:
+ case kTfLiteBuiltinConv2d: {
+ TfLiteConvParams* params = (TfLiteConvParams*)node->builtin_data;
+
+ // Fused activation and dilation not yet implemented
+ if (params->activation == kTfLiteActNone &&
+ (registration->version < 2 ||
+ (params->dilation_width_factor == 1 &&
+ params->dilation_height_factor == 1))) {
+ supported = true;
+ }
+ break;
+ }
+ case kTfLiteBuiltinDepthwiseConv2d: {
+ TfLiteDepthwiseConvParams* params = (TfLiteDepthwiseConvParams*)node->builtin_data;
+
+ // Fused activation and dilation not yet implemented
+ if (params->activation == kTfLiteActNone &&
+ (registration->version < 2 ||
+ (params->dilation_width_factor == 1 &&
+ params->dilation_height_factor == 1))) {
+ supported = true;
+ }
+ break;
+ }
case kTfLiteBuiltinAdd:
supported = true;
break;