ArmNN 21.08
FuseActivationTests.cpp File Reference
#include "LayersFwd.hpp"
#include <Network.hpp>
#include <ResolveType.hpp>
#include <armnn/INetwork.hpp>
#include "test/GraphUtils.hpp"
#include <test/TestUtils.hpp>
#include <doctest/doctest.h>
#include <QuantizeHelper.hpp>
#include <string>

Go to the source code of this file.

Functions

 TEST_SUITE ("Optimizer")
 

Function Documentation

◆ TEST_SUITE()

TEST_SUITE ( "Optimizer"  )

Definition at line 616 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::Elu, armnn::Float16, armnn::Float32, armnn::GpuAcc, armnn::HardSwish, ActivationDescriptor::m_A, ActivationDescriptor::m_B, ActivationDescriptor::m_Function, armnn::QAsymmS8, armnn::QAsymmU8, armnn::ReLu, armnn::Sigmoid, armnn::TanH, and armnn::TEST_SUITE().

{
// ReLu fused into Receiver Layers Float32
TEST_CASE("FuseReLUIntoConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseReLUIntoDWConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseReLUIntoFullyConnectedFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseReLUIntoBatchNormFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// BoundedReLu fused into Receiver Layers Float32
TEST_CASE("FuseBoundedReLUIntoConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 1.0f;
    activationDescriptor.m_B = -1.0f;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseBoundedReLUIntoDWConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 1.0f;
    activationDescriptor.m_B = -1.0f;

    FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseBoundedReLUIntoFullyConnectedFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 1.0f;
    activationDescriptor.m_B = -1.0f;

    FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseBoundedReLUIntoBatchNormFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 1.0f;
    activationDescriptor.m_B = -1.0f;

    FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// ReLu fused into Receiver Layers QAsymmU8
TEST_CASE("FuseReLUIntoConvQAsymmU8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseReLUIntoDWConvQAsymmU8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseReLUIntoFullyConnectedQAsymmU8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::ReLu;

    FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// BoundedReLu fused into Receiver Layers QAsymmS8
TEST_CASE("FuseBoundedReLUIntoConvQASymmS8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 6.0f;
    activationDescriptor.m_B = 0.0f;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseBoundedReLUIntoDWConvQASymmS8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 6.0f;
    activationDescriptor.m_B = 0.0f;

    FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}
TEST_CASE("FuseBoundedReLUIntoFullyConnectedQASymmS8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 6.0f;
    activationDescriptor.m_B = 0.0f;

    FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmS8>, DataType::QAsymmS8>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// TanH fused into Receiver Layers Float32
TEST_CASE("FuseTanHIntoConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::TanH;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// HardSwish fused into Receiver Layers Float32
TEST_CASE("FuseHardSwishIntoConvFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    activationDescriptor.m_Function = ActivationFunction::HardSwish;

    FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
        (activationDescriptor, 0.0001f, Compute::CpuAcc);
}

// Test that all receiver layers followed by all activation functions work, either fused or not fused
TEST_CASE("LayerFollowedByActivationFloat32CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    for (int i = 0; i != 12; ++i)
    {
        activationDescriptor.m_Function = static_cast<ActivationFunction>(i);
        activationDescriptor.m_A = 1.0f;
        activationDescriptor.m_B = -1.0f;
        CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
            (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " << i);
        CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
            (activationDescriptor, Compute::CpuAcc)), "DepthwiseConvolution + Activation function " << i);
        CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
            (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " << i);
        CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float32>, DataType::Float32>
            (activationDescriptor, Compute::CpuAcc)), "BatchNorm + Activation function " << i);
    }
}
TEST_CASE("LayerFollowedByActivationFloat16CpuAccTest")
{
    ActivationDescriptor activationDescriptor;
    for (int i = 0; i != 12; ++i)
    {
        activationDescriptor.m_Function = static_cast<ActivationFunction>(i);
        activationDescriptor.m_A = 1.0f;
        activationDescriptor.m_B = -1.0f;
        CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float16>, DataType::Float16>
            (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " << i);
        CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float16>, DataType::Float16>
            (activationDescriptor, Compute::CpuAcc)), "DepthwiseConvolution + Activation function " << i);
        CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float16>, DataType::Float16>
            (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " << i);
        CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float16>, DataType::Float16>
            (activationDescriptor, Compute::CpuAcc)), "BatchNorm + Activation function " << i);
    }
}
TEST_CASE("LayerFollowedByActivationQAsymmU8CpuAccTest")
{
    ActivationDescriptor activationDescriptor;

    activationDescriptor.m_Function = ActivationFunction::Sigmoid;
    CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)),
        "Convolution + Activation function " << static_cast<int>(activationDescriptor.m_Function));
    CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)),
        "FullyConnected + Activation function " << static_cast<int>(activationDescriptor.m_Function));

    activationDescriptor.m_Function = ActivationFunction::TanH;
    CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)),
        "Convolution + Activation function " << static_cast<int>(activationDescriptor.m_Function));
    CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)),
        "FullyConnected + Activation function " << static_cast<int>(activationDescriptor.m_Function));

    activationDescriptor.m_Function = ActivationFunction::ReLu;
    CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, Compute::CpuAcc)),
        "Convolution + Activation function " << static_cast<int>(activationDescriptor.m_Function));
    CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, Compute::CpuAcc)),
        "FullyConnected + Activation function " << static_cast<int>(activationDescriptor.m_Function));

    activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
    activationDescriptor.m_A = 1.0f;
    activationDescriptor.m_B = -1.0f;
    CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, Compute::CpuAcc)),
        "Convolution + Activation function " << static_cast<int>(activationDescriptor.m_Function));
    CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, Compute::CpuAcc)),
        "FullyConnected + Activation function " << static_cast<int>(activationDescriptor.m_Function));

    activationDescriptor.m_Function = ActivationFunction::HardSwish;
    CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, Compute::CpuAcc)),
        "Convolution + Activation function " << static_cast<int>(activationDescriptor.m_Function));
    CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
        (activationDescriptor, Compute::CpuAcc)),
        "FullyConnected + Activation function " << static_cast<int>(activationDescriptor.m_Function));
}
}
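
The tests above drive the fusion through the file's test helpers. As background, here is a minimal, hedged sketch (not part of the test file) of the same pattern built directly against the public armnn API of this release: a Convolution2d layer followed by an Activation layer. The shapes, weight values, and layer names are illustrative assumptions, and the program assumes armnn was built with the Neon (CpuAcc) backend. The backend optimizations run inside Optimize() and should replace the Convolution2d + Activation pair with a single convolution that applies the activation internally, which is what FuseActivationIntoPreviousLayerTest asserts on the optimized graph.

#include <armnn/ArmNN.hpp>
#include <vector>

int main()
{
    using namespace armnn;

    // Input -> Convolution2d -> Activation(ReLu) -> Output
    INetworkPtr network = INetwork::Create();

    Convolution2dDescriptor convDesc;
    convDesc.m_StrideX    = 1;
    convDesc.m_StrideY    = 1;
    convDesc.m_DataLayout = DataLayout::NHWC;

    ActivationDescriptor actDesc;
    actDesc.m_Function = ActivationFunction::ReLu;

    // Illustrative 2x2 kernel with one input and one output channel.
    TensorInfo weightInfo({1, 2, 2, 1}, DataType::Float32);
    std::vector<float> weightData(4, 1.0f);
    ConstTensor weights(weightInfo, weightData);

    IConnectableLayer* input  = network->AddInputLayer(0);
    IConnectableLayer* conv   = network->AddConvolution2dLayer(convDesc, weights,
                                                               EmptyOptional(), "conv");
    IConnectableLayer* act    = network->AddActivationLayer(actDesc, "act");
    IConnectableLayer* output = network->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(conv->GetInputSlot(0));
    conv->GetOutputSlot(0).Connect(act->GetInputSlot(0));
    act->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({1, 3, 3, 1}, DataType::Float32));
    conv->GetOutputSlot(0).SetTensorInfo(TensorInfo({1, 2, 2, 1}, DataType::Float32));
    act->GetOutputSlot(0).SetTensorInfo(TensorInfo({1, 2, 2, 1}, DataType::Float32));

    // On CpuAcc/GpuAcc the activation fusion happens as a backend optimization
    // during Optimize(); the optimized graph is expected to contain the fused
    // convolution only, with no standalone Activation layer left behind.
    IRuntimePtr runtime = IRuntime::Create(IRuntime::CreationOptions());
    IOptimizedNetworkPtr optNet = Optimize(*network, {Compute::CpuAcc},
                                           runtime->GetDeviceSpec());
    return optNet ? 0 : 1;
}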
Referenced symbol documentation (from the linked headers):

ActivationDescriptor: An ActivationDescriptor for the ActivationLayer. Definition: Descriptors.hpp:25
ActivationDescriptor::m_A (float): Alpha upper bound value used by the activation functions (BoundedReLu, Linear, TanH, Elu). Definition: Descriptors.hpp:50
ActivationDescriptor::m_B (float): Beta lower bound value used by the activation functions (BoundedReLu, Linear, TanH). Definition: Descriptors.hpp:52
ActivationDescriptor::m_Function (ActivationFunction): The activation function to use (Sigmoid, TanH, Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square, Elu). Definition: Descriptors.hpp:48
ActivationFunction: Definition: Types.hpp:66
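
Many of the BoundedReLu tests above set m_A and m_B. As a brief illustration of how those two fields act (m_A is the upper clamp, m_B the lower clamp), BoundedReLu is equivalent to the function below, so the QAsymmS8 tests with m_A = 6.0f and m_B = 0.0f exercise the common "ReLu6" activation:

#include <algorithm>

// Illustrative only: BoundedReLu clamps its input to the range [m_B, m_A].
float BoundedReLu(float x, float a /* m_A */, float b /* m_B */)
{
    return std::min(a, std::max(b, x));
}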