ArmNN 22.02
FuseActivationTests.cpp File Reference
#include "LayersFwd.hpp"
#include <Network.hpp>
#include <ResolveType.hpp>
#include <armnn/INetwork.hpp>
#include <GraphUtils.hpp>
#include <TestUtils.hpp>
#include <doctest/doctest.h>
#include <armnnUtils/QuantizeHelper.hpp>
#include <string>
Functions

 TEST_SUITE ("Optimizer")
 

Function Documentation

◆ TEST_SUITE()

TEST_SUITE ( "Optimizer"  )

Definition at line 625 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::Elu, armnn::Float16, armnn::Float32, armnn::GpuAcc, armnn::HardSwish, ActivationDescriptor::m_A, ActivationDescriptor::m_B, ActivationDescriptor::m_Function, armnn::QAsymmS8, armnn::QAsymmU8, armnn::ReLu, armnn::Sigmoid, and armnn::TanH.
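
The FuseActivationIntoPreviousLayerTest and FuseActivationSimpleTest helpers invoked below are defined earlier in FuseActivationTests.cpp and are not reproduced on this page. For orientation, here is a minimal sketch of the pattern they exercise, written against the public INetwork API: build a Convolution2d layer followed by an Activation layer, then optimize for a backend, after which the optimizer is expected to fold the activation into the convolution. All shapes, weight values, and layer names here are illustrative assumptions, not the helpers' actual internals.

// Sketch only: Conv2d -> Activation, then Optimize() for CpuAcc.
// Shapes and values are hypothetical; the real tests use templated helper classes.
#include <armnn/INetwork.hpp>
#include <armnn/IRuntime.hpp>
#include <vector>

int main()
{
    using namespace armnn;

    INetworkPtr network = INetwork::Create();

    // Hypothetical NHWC input and a single 3x3 filter over 3 channels.
    TensorInfo inputInfo({1, 4, 4, 3}, DataType::Float32);
    TensorInfo weightInfo({1, 3, 3, 3}, DataType::Float32);
    weightInfo.SetConstant(true);
    std::vector<float> weightData(weightInfo.GetNumElements(), 1.0f);
    ConstTensor weights(weightInfo, weightData);

    Convolution2dDescriptor convDesc;
    convDesc.m_StrideX    = 1;
    convDesc.m_StrideY    = 1;
    convDesc.m_DataLayout = DataLayout::NHWC;

    ActivationDescriptor actDesc;
    actDesc.m_Function = ActivationFunction::ReLu;

    IConnectableLayer* input  = network->AddInputLayer(0);
    IConnectableLayer* conv   = network->AddConvolution2dLayer(convDesc, weights, EmptyOptional(), "conv");
    IConnectableLayer* act    = network->AddActivationLayer(actDesc, "act");
    IConnectableLayer* output = network->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(conv->GetInputSlot(0));
    conv->GetOutputSlot(0).Connect(act->GetInputSlot(0));
    act->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
    conv->GetOutputSlot(0).SetTensorInfo(TensorInfo({1, 2, 2, 1}, DataType::Float32));
    act->GetOutputSlot(0).SetTensorInfo(TensorInfo({1, 2, 2, 1}, DataType::Float32));

    // Requires an ArmNN build with the Neon (CpuAcc) backend.
    IRuntimePtr runtime = IRuntime::Create(IRuntime::CreationOptions());
    IOptimizedNetworkPtr optNet = Optimize(*network, {Compute::CpuAcc}, runtime->GetDeviceSpec());
    // The helpers then assert that the optimized graph no longer contains a
    // standalone Activation layer and that inference results are unchanged.
    return 0;
}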

626 {
627 // ReLu fused into Receiver Layers Float32
628 TEST_CASE("FuseReLUIntoConvFloat32CpuAccTest")
629 {
630  ActivationDescriptor activationDescriptor;
631  activationDescriptor.m_Function = ActivationFunction::ReLu;
632 
633  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
634  (activationDescriptor, 0.0001f, Compute::CpuAcc);
635 }
636 TEST_CASE("FuseReLUIntoDWConvFloat32CpuAccTest")
637 {
638  ActivationDescriptor activationDescriptor;
639  activationDescriptor.m_Function = ActivationFunction::ReLu;
640 
641  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
642  (activationDescriptor, 0.0001f, Compute::CpuAcc);
643 }
644 TEST_CASE("FuseReLUIntoFullyConnectedFloat32CpuAccTest")
645 {
646  ActivationDescriptor activationDescriptor;
647  activationDescriptor.m_Function = ActivationFunction::ReLu;
648 
649  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
650  (activationDescriptor, 0.0001f, Compute::CpuAcc);
651 }
652 TEST_CASE("FuseReLUIntoBatchNormFloat32CpuAccTest")
653 {
654  ActivationDescriptor activationDescriptor;
655  activationDescriptor.m_Function = ActivationFunction::ReLu;
656 
657  FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
658  (activationDescriptor, 0.0001f, Compute::CpuAcc);
659 }
660 
661 // BoundedReLu fused into Receiver Layers Float32
662 TEST_CASE("FuseBoundedReLUIntoConvFloat32CpuAccTest")
663 {
664  ActivationDescriptor activationDescriptor;
665  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
666  activationDescriptor.m_A = 1.0f;
667  activationDescriptor.m_B = -1.0f;
668 
669  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
670  (activationDescriptor, 0.0001f, Compute::CpuAcc);
671 }
672 TEST_CASE("FuseBoundedReLUIntoDWConvFloat32CpuAccTest")
673 {
674  ActivationDescriptor activationDescriptor;
675  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
676  activationDescriptor.m_A = 1.0f;
677  activationDescriptor.m_B = -1.0f;
678 
679  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
680  (activationDescriptor, 0.0001f, Compute::CpuAcc);
681 }
682 TEST_CASE("FuseBoundedReLUIntoFullyConnectedFloat32CpuAccTest")
683 {
684  ActivationDescriptor activationDescriptor;
685  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
686  activationDescriptor.m_A = 1.0f;
687  activationDescriptor.m_B = -1.0f;
688 
689  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
690  (activationDescriptor, 0.0001f, Compute::CpuAcc);
691 }
692 TEST_CASE("FuseBoundedReLUIntoBatchNormFloat32CpuAccTest")
693 {
694  ActivationDescriptor activationDescriptor;
695  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
696  activationDescriptor.m_A = 1.0f;
697  activationDescriptor.m_B = -1.0f;
698 
699  FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
700  (activationDescriptor, 0.0001f, Compute::CpuAcc);
701 }
702 
703 // ReLu fused into Receiver Layers QAsymmU8
704 TEST_CASE("FuseReLUIntoConvQAsymmU8CpuAccTest")
705 {
706  ActivationDescriptor activationDescriptor;
707  activationDescriptor.m_Function = ActivationFunction::ReLu;
708 
709  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
710  (activationDescriptor, 0.0001f, Compute::CpuAcc);
711 }
712 TEST_CASE("FuseReLUIntoDWConvQAsymmU8CpuAccTest")
713 {
714  ActivationDescriptor activationDescriptor;
715  activationDescriptor.m_Function = ActivationFunction::ReLu;
716 
717  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
718  (activationDescriptor, 0.0001f, Compute::CpuAcc);
719 }
720 TEST_CASE("FuseReLUIntoFullyConnectedQAsymmU8CpuAccTest")
721 {
722  ActivationDescriptor activationDescriptor;
723  activationDescriptor.m_Function = ActivationFunction::ReLu;
724 
725  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
726  (activationDescriptor, 0.0001f, Compute::CpuAcc);
727 }
728 
729 // BoundedReLu fused into Receiver Layers QAsymmS8
730 TEST_CASE("FuseBoundedReLUIntoConvQASymmS8CpuAccTest")
731 {
732  ActivationDescriptor activationDescriptor;
733  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
734  activationDescriptor.m_A = 6.0f;
735  activationDescriptor.m_B = 0.0f;
736 
737  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
738  (activationDescriptor, 0.0001f, Compute::CpuAcc);
739 }
740 TEST_CASE("FuseBoundedReLUIntoDWConvQASymmS8CpuAccTest")
741 {
742  ActivationDescriptor activationDescriptor;
743  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
744  activationDescriptor.m_A = 6.0f;
745  activationDescriptor.m_B = 0.0f;
746 
747  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
748  (activationDescriptor, 0.0001f, Compute::CpuAcc);
749 }
750 TEST_CASE("FuseBoundedReLUIntoFullyConnectedQASymmS8CpuAccTest")
751 {
752  ActivationDescriptor activationDescriptor;
753  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
754  activationDescriptor.m_A = 6.0f;
755  activationDescriptor.m_B = 0.0f;
756 
757  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmS8>, DataType::QAsymmS8>
758  (activationDescriptor, 0.0001f, Compute::CpuAcc);
759 }
760 
761 // TanH fused into Receiver Layers Float32
762 TEST_CASE("FuseTanHIntoConvFloat32CpuAccTest")
763 {
764  ActivationDescriptor activationDescriptor;
765  activationDescriptor.m_Function = ActivationFunction::TanH;
766 
767  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
768  (activationDescriptor, 0.0001f, Compute::CpuAcc);
769 }
770 
771 // HardSwish fused into Receiver Layers Float32
772 TEST_CASE("FuseHardSwishIntoConvFloat32CpuAccTest")
773 {
774  ActivationDescriptor activationDescriptor;
775  activationDescriptor.m_Function = ActivationFunction::HardSwish;
776 
777  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
778  (activationDescriptor, 0.0001f, Compute::CpuAcc);
779 }
780 
781 // Test that all receiver layers followed by all activation layers work, either fused or not fused
782 TEST_CASE("LayerFollowedByActivationFloat32CpuAccTest")
783 {
784  ActivationDescriptor activationDescriptor;
785  for (int i = 0; i != 12; ++i)
786  {
787  activationDescriptor.m_Function = static_cast<ActivationFunction>(i);
788  activationDescriptor.m_A = 1.0f;
789  activationDescriptor.m_B = -1.0f;
790  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
791  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " << i);
792  CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
793  (activationDescriptor, Compute::CpuAcc)), "DepthwiseConvolution + Activation function " << i);
794  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
795  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " << i);
796  CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float32>, DataType::Float32>
797  (activationDescriptor, Compute::CpuAcc)), "BatchNorm + Activation function " << i);
798  }
799 }
800 TEST_CASE("LayerFollowedByActivationFloat16CpuAccTest")
801 {
802  ActivationDescriptor activationDescriptor;
803  for (int i = 0; i != 12; ++i)
804  {
805  activationDescriptor.m_Function = static_cast<ActivationFunction>(i);
806  activationDescriptor.m_A = 1.0f;
807  activationDescriptor.m_B = -1.0f;
808  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float16>, DataType::Float16>
809  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " << i);
810  CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float16>, DataType::Float16>
811  (activationDescriptor, Compute::CpuAcc)), "DepthwiseConvolution + Activation function " << i);
812  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float16>, DataType::Float16>
813  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " << i);
814  CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float16>, DataType::Float16>
815  (activationDescriptor, Compute::CpuAcc)), "BatchNorm + Activation function " << i);
816  }
817 }
818 TEST_CASE("LayerFollowedByActivationQAsymmU8CpuAccTest")
819 {
820  ActivationDescriptor activationDescriptor;
821 
822  activationDescriptor.m_Function = ActivationFunction::Sigmoid;
823  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
824  (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)), "Convolution + Activation function " <<
825  static_cast<int>(activationDescriptor.m_Function));
826  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
827  (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)), "FullyConnected + Activation function " <<
828  static_cast<int>(activationDescriptor.m_Function));
829 
830  activationDescriptor.m_Function = ActivationFunction::TanH;
831  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
832  (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)), "Convolution + Activation function " <<
833  static_cast<int>(activationDescriptor.m_Function));
834  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
835  (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)), "FullyConnected + Activation function " <<
836  static_cast<int>(activationDescriptor.m_Function));
837 
838  activationDescriptor.m_Function = ActivationFunction::ReLu;
839  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
840  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " <<
841  static_cast<int>(activationDescriptor.m_Function));
842  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
843  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " <<
844  static_cast<int>(activationDescriptor.m_Function));
845 
846  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
847  activationDescriptor.m_A = 1.0f;
848  activationDescriptor.m_B = -1.0f;
849  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
850  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " <<
851  static_cast<int>(activationDescriptor.m_Function));
852  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
853  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " <<
854  static_cast<int>(activationDescriptor.m_Function));
855 
856  activationDescriptor.m_Function = ActivationFunction::HardSwish;
857  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
858  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " <<
859  static_cast<int>(activationDescriptor.m_Function));
860  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
861  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " <<
862  static_cast<int>(activationDescriptor.m_Function));
863 }
864 }
ActivationDescriptor
An ActivationDescriptor for the ActivationLayer.
Definition: Descriptors.hpp:36
float m_A
Alpha upper bound value used by the activation functions. (BoundedReLu, Linear, TanH, Elu).
Definition: Descriptors.hpp:61
float m_B
Beta lower bound value used by the activation functions. (BoundedReLu, Linear, TanH).
Definition: Descriptors.hpp:63
ActivationFunction m_Function
The activation function to use (Sigmoid, TanH, Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square, Elu).
Definition: Descriptors.hpp:59
ActivationFunction
Definition: Types.hpp:73
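
The m_A and m_B fields only take effect for the parameterised functions such as BoundedReLu, which clamps its input to the range [m_B, m_A]. As a small illustrative sketch (not code from this file), this is how the QAsymmS8 tests above configure the descriptor as ReLU6:

#include <armnn/Descriptors.hpp>

// Sketch: BoundedReLu computes f(x) = min(m_A, max(m_B, x)).
armnn::ActivationDescriptor MakeReLu6Descriptor()
{
    armnn::ActivationDescriptor desc;
    desc.m_Function = armnn::ActivationFunction::BoundedReLu;
    desc.m_A = 6.0f; // upper bound
    desc.m_B = 0.0f; // lower bound: clamp(x, 0, 6), i.e. ReLU6
    return desc;
}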