ArmNN
 22.05
FuseActivationTests.cpp File Reference
#include "LayersFwd.hpp"
#include <Network.hpp>
#include <ResolveType.hpp>
#include <armnn/INetwork.hpp>
#include <GraphUtils.hpp>
#include <TestUtils.hpp>
#include <doctest/doctest.h>
#include <armnnUtils/QuantizeHelper.hpp>
#include <string>


Functions

 TEST_SUITE ("Optimizer")
 

Function Documentation

◆ TEST_SUITE()

TEST_SUITE ( "Optimizer"  )

Definition at line 632 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::Elu, armnn::Float16, armnn::Float32, armnn::GpuAcc, armnn::HardSwish, ActivationDescriptor::m_A, ActivationDescriptor::m_B, ActivationDescriptor::m_Function, armnn::QAsymmS8, armnn::QAsymmU8, armnn::ReLu, armnn::Sigmoid, armnn::TanH, and armnn::TEST_SUITE().

{
// ReLu fused into Receiver Layers Float32
TEST_CASE("FuseReLUIntoConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseReLUIntoDWConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseReLUIntoFullyConnectedFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseReLUIntoBatchNormFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// BoundedReLu fused into Receiver Layers Float32
TEST_CASE("FuseBoundedReLUIntoConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 1.0f;
    activationDescriptor.m_B = -1.0f;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseBoundedReLUIntoDWConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 1.0f;
    activationDescriptor.m_B = -1.0f;

    FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseBoundedReLUIntoFullyConnectedFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 1.0f;
    activationDescriptor.m_B = -1.0f;

    FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseBoundedReLUIntoBatchNormFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 1.0f;
    activationDescriptor.m_B = -1.0f;

    FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// ReLU fused into Receiver Layers QAsymmU8
TEST_CASE("FuseReLUIntoConvQAsymmU8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseReLUIntoDWConvQAsymmU8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseReLUIntoFullyConnectedQAsymmU8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// BoundedReLu fused into Receiver Layers QAsymmS8
TEST_CASE("FuseBoundedReLUIntoConvQASymmS8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 6.0f;
    activationDescriptor.m_B = 0.0f;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseBoundedReLUIntoDWConvQASymmS8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 6.0f;
    activationDescriptor.m_B = 0.0f;

    FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseBoundedReLUIntoFullyConnectedQASymmS8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 6.0f;
    activationDescriptor.m_B = 0.0f;

    FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmS8>, DataType::QAsymmS8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// TanH fused into Receiver Layers Float32
TEST_CASE("FuseTanHIntoConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::TanH;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// HardSwish fused into Receiver Layers Float32
TEST_CASE("FuseHardSwishIntoConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::HardSwish;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// Test that every receiver layer followed by every activation layer works, either fused or not fused
789 TEST_CASE("LayerFollowedByActivationFloat32CpuAccTest")
790 {
791  ActivationDescriptor activationDescriptor;
792  for (int i = 0; i != 12; ++i)
793  {
794  activationDescriptor.m_Function = static_cast<ActivationFunction>(i);
795  activationDescriptor.m_A = 1.0f;
796  activationDescriptor.m_B = -1.0f;
797  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
798  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " << i);
799  CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
800  (activationDescriptor, Compute::CpuAcc)), "DepthwiseConvolution + Activation function " << i);
801  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
802  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " << i);
803  CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float32>, DataType::Float32>
804  (activationDescriptor, Compute::CpuAcc)), "BatchNorm + Activation function " << i);
805  }
806 }
807 TEST_CASE("LayerFollowedByActivationFloat16CpuAccTest")
808 {
809  ActivationDescriptor activationDescriptor;
810  for (int i = 0; i != 12; ++i)
811  {
812  activationDescriptor.m_Function = static_cast<ActivationFunction>(i);
813  activationDescriptor.m_A = 1.0f;
814  activationDescriptor.m_B = -1.0f;
815  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float16>, DataType::Float16>
816  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " << i);
817  CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float16>, DataType::Float16>
818  (activationDescriptor, Compute::CpuAcc)), "DepthwiseConvolution + Activation function " << i);
819  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float16>, DataType::Float16>
820  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " << i);
821  CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float16>, DataType::Float16>
822  (activationDescriptor, Compute::CpuAcc)), "BatchNorm + Activation function " << i);
823  }
824 }
825 TEST_CASE("LayerFollowedByActivationQAsymmU8CpuAccTest")
826 {
827  ActivationDescriptor activationDescriptor;
828 
829  activationDescriptor.m_Function = ActivationFunction::Sigmoid;
830  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
831  (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)), "Convolution + Activation function " <<
832  static_cast<int>(activationDescriptor.m_Function));
833  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
834  (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)), "FullyConnected + Activation function " <<
835  static_cast<int>(activationDescriptor.m_Function));
836 
837  activationDescriptor.m_Function = ActivationFunction::TanH;
838  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
839  (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)), "Convolution + Activation function " <<
840  static_cast<int>(activationDescriptor.m_Function));
841  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
842  (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)), "FullyConnected + Activation function " <<
843  static_cast<int>(activationDescriptor.m_Function));
844 
845  activationDescriptor.m_Function = ActivationFunction::ReLu;
846  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
847  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " <<
848  static_cast<int>(activationDescriptor.m_Function));
849  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
850  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " <<
851  static_cast<int>(activationDescriptor.m_Function));
852 
853  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
854  activationDescriptor.m_A = 1.0f;
855  activationDescriptor.m_B = -1.0f;
856  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
857  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " <<
858  static_cast<int>(activationDescriptor.m_Function));
859  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
860  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " <<
861  static_cast<int>(activationDescriptor.m_Function));
862 
863  activationDescriptor.m_Function = ActivationFunction::HardSwish;
864  CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
865  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " <<
866  static_cast<int>(activationDescriptor.m_Function));
867  CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
868  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " <<
869  static_cast<int>(activationDescriptor.m_Function));
870 }
871 }
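The BoundedReLu cases above use m_A and m_B as the upper and lower clamp bounds documented below. The following standalone sketch shows the semantics a fused BoundedReLu kernel has to reproduce; the helper names MakeBoundedReLu and BoundedReLuRef are illustrative and are not part of this file.

#include <armnn/Descriptors.hpp>
#include <algorithm>

// Illustrative helper: configures the descriptor the same way the
// QASymmS8 BoundedReLu cases above do (a ReLu6-style clamp for 6.0f/0.0f).
armnn::ActivationDescriptor MakeBoundedReLu(float upper, float lower)
{
    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = armnn::ActivationFunction::BoundedReLu;
    descriptor.m_A = upper;  // alpha: upper clamp bound
    descriptor.m_B = lower;  // beta: lower clamp bound
    return descriptor;
}

// Reference semantics of BoundedReLu: clamp x to [lower, upper].
float BoundedReLuRef(float x, float upper, float lower)
{
    return std::min(upper, std::max(lower, x));
}

int main()
{
    // Bounds used by the QAsymmS8 tests above: m_A = 6.0f, m_B = 0.0f.
    armnn::ActivationDescriptor relu6 = MakeBoundedReLu(6.0f, 0.0f);
    // 7.5f lies above the upper bound, so it must clamp to exactly 6.0f.
    return BoundedReLuRef(7.5f, relu6.m_A, relu6.m_B) == 6.0f ? 0 : 1;
}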
ActivationDescriptor
An ActivationDescriptor for the ActivationLayer.
Definition: Descriptors.hpp:36

float m_A
Alpha upper bound value used by the activation functions. (BoundedReLu, Linear, TanH, Elu).
Definition: Descriptors.hpp:61

float m_B
Beta lower bound value used by the activation functions. (BoundedReLu, Linear, TanH).
Definition: Descriptors.hpp:63

ActivationFunction m_Function
The activation function to use (Sigmoid, TanH, Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square, Elu).
Definition: Descriptors.hpp:59

ActivationFunction
Definition: Types.hpp:86
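
The LayerFollowedByActivation* cases cast the loop index straight to ActivationFunction, so they depend on the enum at Types.hpp:86 being contiguous from zero. The sketch below states that dependency explicitly, assuming the 22.05 ordering of the enum; verify against Types.hpp before relying on it.

#include <armnn/Types.hpp>

// Assumed ordering of armnn::ActivationFunction in this release:
//   0: Sigmoid      1: TanH       2: Linear      3: ReLu
//   4: BoundedReLu  5: SoftReLu   6: LeakyReLu   7: Abs
//   8: Sqrt         9: Square    10: Elu        11: HardSwish
// The i = 0..11 loops above visit each value once; this guard would catch
// an enum extension that invalidates the hard-coded bound of 12.
static_assert(static_cast<int>(armnn::ActivationFunction::HardSwish) == 11,
              "ActivationFunction layout changed; update the loop bound of 12");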