From 08346e9b9a7dadd2f0765aea64e656902d843e8a Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Tue, 16 Oct 2018 19:10:46 +0100
Subject: COMPMID-1451: Fuse RELU, LU_BOUNDED_RELU with requantization in
 NEGEMMConvolutionLayer.

Change-Id: Iea5f2c5bcac8051c4c7655a6eabb2c43772eb31f
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/154104
Tested-by: bsgcomp
Reviewed-by: Michele DiGiorgio
Reviewed-by: Gian Marco Iodice
---
 arm_compute/graph/nodes/BatchNormalizationLayerNode.h | 3 +++
 1 file changed, 3 insertions(+)

(limited to 'arm_compute/graph/nodes/BatchNormalizationLayerNode.h')

diff --git a/arm_compute/graph/nodes/BatchNormalizationLayerNode.h b/arm_compute/graph/nodes/BatchNormalizationLayerNode.h
index a364d1c5ae..b2284782bd 100644
--- a/arm_compute/graph/nodes/BatchNormalizationLayerNode.h
+++ b/arm_compute/graph/nodes/BatchNormalizationLayerNode.h
@@ -62,6 +62,9 @@ public:
     TensorDescriptor configure_output(size_t idx) const override;
     void accept(INodeVisitor &v) override;
 
+public:
+    static constexpr NodeType node_type = NodeType::BatchNormalizationLayer;
+
 private:
     float               _epsilon;
     ActivationLayerInfo _fused_activation;
--
cgit v1.2.1
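
Note: the static node_type constant added here is the kind of compile-time type
tag that graph-mutation passes can match on without RTTI, for example when
looking for nodes into which an activation such as RELU or LU_BOUNDED_RELU can
be fused. The sketch below is a minimal, self-contained illustration of that
pattern; the NodeType enum, INode interface, and count_fusion_candidates helper
are hypothetical stand-ins and not the actual arm_compute graph API.

// Hypothetical sketch: a static node_type constant lets a templated pass
// match nodes of a given class against their runtime type tag.
#include <iostream>
#include <memory>
#include <vector>

enum class NodeType { BatchNormalizationLayer, ConvolutionLayer, ActivationLayer };

struct INode
{
    virtual ~INode() = default;
    virtual NodeType type() const = 0;
};

struct BatchNormalizationLayerNode : INode
{
    // Mirrors the constant introduced by this patch (illustrative only).
    static constexpr NodeType node_type = NodeType::BatchNormalizationLayer;
    NodeType type() const override { return node_type; }
};

struct ConvolutionLayerNode : INode
{
    static constexpr NodeType node_type = NodeType::ConvolutionLayer;
    NodeType type() const override { return node_type; }
};

// A fusion pass written once and instantiated per node class: the static
// constant ties the template parameter to its runtime type tag.
template <typename TargetNode>
int count_fusion_candidates(const std::vector<std::unique_ptr<INode>> &nodes)
{
    int count = 0;
    for(const auto &n : nodes)
    {
        if(n && n->type() == TargetNode::node_type)
        {
            ++count;
        }
    }
    return count;
}

int main()
{
    std::vector<std::unique_ptr<INode>> graph;
    graph.push_back(std::make_unique<ConvolutionLayerNode>());
    graph.push_back(std::make_unique<BatchNormalizationLayerNode>());

    std::cout << "BN fusion candidates: "
              << count_fusion_candidates<BatchNormalizationLayerNode>(graph) << "\n";
    return 0;
}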