From 8b2a7d3aa119e7f1d6a03690d05eb27c5d178b9f Mon Sep 17 00:00:00 2001
From: Giorgio Arena
Date: Tue, 11 Feb 2020 17:21:31 +0000
Subject: COMPMID-3101 Fuse activation with floating point elementwise operation
 layers in CL

Signed-off-by: Giorgio Arena
Change-Id: I1693f8664ba7c0dc8c076bbe7365cef1e667bd25
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/2718
Tested-by: Arm Jenkins
Reviewed-by: Gian Marco Iodice
Comments-Addressed: Arm Jenkins
---
 src/graph/nodes/EltwiseLayerNode.cpp | 10 ++++++++++
 1 file changed, 10 insertions(+)
(limited to 'src/graph/nodes')

diff --git a/src/graph/nodes/EltwiseLayerNode.cpp b/src/graph/nodes/EltwiseLayerNode.cpp
index a83a5fb3b2..92d183e693 100644
--- a/src/graph/nodes/EltwiseLayerNode.cpp
+++ b/src/graph/nodes/EltwiseLayerNode.cpp
@@ -52,6 +52,16 @@ RoundingPolicy EltwiseLayerNode::rounding_policy() const
     return descriptor.r_policy;
 }
 
+ActivationLayerInfo EltwiseLayerNode::fused_activation() const
+{
+    return descriptor.fused_activation;
+}
+
+void EltwiseLayerNode::set_fused_activation(ActivationLayerInfo fused_activation)
+{
+    descriptor.fused_activation = fused_activation;
+}
+
 bool EltwiseLayerNode::forward_descriptors()
 {
     if((input_id(0) != NullTensorID) && (output_id(0) != NullTensorID))
-- 
cgit v1.2.1
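
For context, below is a minimal standalone sketch of the pattern this patch enables: the eltwise node's descriptor carries an ActivationLayerInfo that a graph fusion pass can populate, so the CL backend can apply the activation in the same kernel as the elementwise operation. The types here (ActivationFunction, ActivationLayerInfo, EltwiseLayerDescriptor) are simplified stand-ins for illustration, not the library's real definitions.

```cpp
#include <iostream>

// Stand-in for the library's activation description; illustration only.
enum class ActivationFunction { IDENTITY, RELU, BOUNDED_RELU };

struct ActivationLayerInfo
{
    ActivationFunction func{ ActivationFunction::IDENTITY };
    bool               enabled{ false };
};

// Stand-in descriptor: the real one also carries the operation and
// conversion/rounding policies used by the node.
struct EltwiseLayerDescriptor
{
    ActivationLayerInfo fused_activation{}; // disabled until a mutator fuses one in
};

class EltwiseLayerNode
{
public:
    // Getter/setter pair mirroring the ones added by this patch.
    ActivationLayerInfo fused_activation() const
    {
        return descriptor.fused_activation;
    }
    void set_fused_activation(ActivationLayerInfo fused_activation)
    {
        descriptor.fused_activation = fused_activation;
    }

private:
    EltwiseLayerDescriptor descriptor{};
};

int main()
{
    EltwiseLayerNode add_node;

    // A fusion mutator that detects an Eltwise -> Activation chain would
    // transfer the activation info onto the eltwise node and remove the
    // standalone activation node, roughly like this:
    add_node.set_fused_activation({ ActivationFunction::RELU, true });

    std::cout << std::boolalpha
              << "activation fused: " << add_node.fused_activation().enabled << '\n';
    return 0;
}
```

The setter intentionally stores the whole ActivationLayerInfo rather than a flag, so the backend kernel can later read the fused function and its parameters directly from the node when configuring the CL elementwise kernel.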