diff options
author | oliper01 <oliver.perssonbogdanovski@arm.com> | 2022-06-21 08:51:01 +0000 |
---|---|---|
committer | oliver.perssonbogdanovski <oliver.perssonbogdanovski@arm.com> | 2022-07-12 11:22:47 +0000 |
commit | c4d35eb580902dfe6acedb2db3a72c32760f86af (patch) | |
tree | f431a4d50fbecd677ede45727844d7e7234a687b | |
parent | 6b2a0b4a64d01c8b038050a87c29f38a4909515c (diff) | |
download | ethos-u-vela-c4d35eb580902dfe6acedb2db3a72c32760f86af.tar.gz |
MLBEDSW-4856: Removed dead code
Hardswish activation function gets converted to LUT in graph optimizer. The case for it was removed, as it was never called.
Signed-off-by: oliper01 <oliver.perssonbogdanovski@arm.com>
Change-Id: I376e8d7b81489c06b66d4e49f59b207600c0ccce
-rw-r--r-- | ethosu/vela/operation.py | 4 |
1 file changed, 1 insertion, 3 deletions
diff --git a/ethosu/vela/operation.py b/ethosu/vela/operation.py index 6b6671be..9488fb7f 100644 --- a/ethosu/vela/operation.py +++ b/ethosu/vela/operation.py @@ -436,9 +436,7 @@ def create_activation_function(op_type: Op, min=None, max=None) -> ActivationFun elif op_type == Op.Sigmoid: act.min = 0.0 act.max = 1.0 - elif op_type == Op.HardSwish: - act.min = 0.0 - if op_type == Op.Clamp: + elif op_type == Op.Clamp: assert min is not None and max is not None act.min = min act.max = max |