From 047c6fcd2ead657ea251a251893767aa90d6bde3 Mon Sep 17 00:00:00 2001
From: Gian Marco Iodice <gianmarco.iodice@arm.com>
Date: Mon, 21 Sep 2020 14:22:25 +0100
Subject: COMPMID-3791: Add support for all activation types in NodeFusionMutator

Change-Id: I9b548966201c00df8290fea7acf55c2173b0e0aa
Signed-off-by: Gian Marco Iodice <gianmarco.iodice@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4011
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
---
 src/graph/mutators/NodeFusionMutator.cpp | 19 ++++++++++---------
 1 file changed, 10 insertions(+), 9 deletions(-)
(limited to 'src')

diff --git a/src/graph/mutators/NodeFusionMutator.cpp b/src/graph/mutators/NodeFusionMutator.cpp
index afc4452202..2a80825b36 100644
--- a/src/graph/mutators/NodeFusionMutator.cpp
+++ b/src/graph/mutators/NodeFusionMutator.cpp
@@ -300,10 +300,11 @@ IGraphMutator::MutationType NodeFusionMutator::type() const
 void NodeFusionMutator::mutate(Graph &g)
 {
     // Supported activations when fusing
-    const std::set<Activation> supported_fused_activations_conv    = { Activation::RELU, Activation::BOUNDED_RELU, Activation::LU_BOUNDED_RELU };
-    const std::set<Activation> supported_fused_activations_eltwise = { Activation::RELU, Activation::BOUNDED_RELU, Activation::LU_BOUNDED_RELU,
-                                                                       Activation::TANH, Activation::LOGISTIC
-                                                                     };
+    const std::set<Activation> supported_fused_activations = { Activation::ABS, Activation::BOUNDED_RELU, Activation::ELU,
+                                                               Activation::HARD_SWISH, Activation::IDENTITY, Activation::LEAKY_RELU,
+                                                               Activation::LINEAR, Activation::LOGISTIC, Activation::LU_BOUNDED_RELU,
+                                                               Activation::RELU, Activation::SOFT_RELU, Activation::SQRT,
+                                                               Activation::SQUARE, Activation::TANH };
 
     // Preconditions
     auto empty_prec = [](INode &)
@@ -328,11 +329,11 @@ void NodeFusionMutator::mutate(Graph &g)
     };
 
     // Fusion mutations
-    detail::fuse_layer<BatchNormalizationLayerNode, ActivationLayerNode>(g, empty_prec, detail::fuse_node_with_activation<BatchNormalizationLayerNode>, supported_fused_activations_conv);
-    detail::fuse_layer<ConvolutionLayerNode, ActivationLayerNode>(g, empty_prec, detail::fuse_node_with_activation<ConvolutionLayerNode>, supported_fused_activations_conv);
-    detail::fuse_layer<DepthwiseConvolutionLayerNode, ActivationLayerNode>(g, qs8_prec, detail::fuse_node_with_activation<DepthwiseConvolutionLayerNode>, supported_fused_activations_conv);
-    detail::fuse_layer<FullyConnectedLayerNode, ActivationLayerNode>(g, empty_prec, detail::fuse_node_with_activation<FullyConnectedLayerNode>, supported_fused_activations_conv);
-    detail::fuse_layer<EltwiseLayerNode, ActivationLayerNode>(g, cl_target_prec, detail::fuse_node_with_activation<EltwiseLayerNode>, supported_fused_activations_eltwise);
+    detail::fuse_layer<BatchNormalizationLayerNode, ActivationLayerNode>(g, empty_prec, detail::fuse_node_with_activation<BatchNormalizationLayerNode>, supported_fused_activations);
+    detail::fuse_layer<ConvolutionLayerNode, ActivationLayerNode>(g, empty_prec, detail::fuse_node_with_activation<ConvolutionLayerNode>, supported_fused_activations);
+    detail::fuse_layer<DepthwiseConvolutionLayerNode, ActivationLayerNode>(g, qs8_prec, detail::fuse_node_with_activation<DepthwiseConvolutionLayerNode>, supported_fused_activations);
+    detail::fuse_layer<FullyConnectedLayerNode, ActivationLayerNode>(g, empty_prec, detail::fuse_node_with_activation<FullyConnectedLayerNode>, supported_fused_activations);
+    detail::fuse_layer<EltwiseLayerNode, ActivationLayerNode>(g, cl_target_prec, detail::fuse_node_with_activation<EltwiseLayerNode>, supported_fused_activations);
     detail::fuse_layer<ConvolutionLayerNode, BatchNormalizationLayerNode>(g, empty_prec, detail::fuse_convolution_with_batch_normalization);
     detail::fuse_layer<DepthwiseConvolutionLayerNode, BatchNormalizationLayerNode>(g, empty_prec, detail::fuse_depthwise_convolution_with_batch_normalization);
 }
-- 
cgit v1.2.1