628 TEST_CASE(
"FuseReLUIntoConvFloat32CpuAccTest")
631 activationDescriptor.
m_Function = ActivationFunction::ReLu;
633 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
634 (activationDescriptor, 0.0001f, Compute::CpuAcc);
636 TEST_CASE(
"FuseReLUIntoDWConvFloat32CpuAccTest")
639 activationDescriptor.
m_Function = ActivationFunction::ReLu;
641 FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
642 (activationDescriptor, 0.0001f, Compute::CpuAcc);
644 TEST_CASE(
"FuseReLUIntoFullyConnectedFloat32CpuAccTest")
647 activationDescriptor.
m_Function = ActivationFunction::ReLu;
649 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
650 (activationDescriptor, 0.0001f, Compute::CpuAcc);
652 TEST_CASE(
"FuseReLUIntoBatchNormFloat32CpuAccTest")
655 activationDescriptor.
m_Function = ActivationFunction::ReLu;
657 FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
658 (activationDescriptor, 0.0001f, Compute::CpuAcc);
662 TEST_CASE(
"FuseBoundedReLUIntoConvFloat32CpuAccTest")
665 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
666 activationDescriptor.
m_A = 1.0f;
667 activationDescriptor.
m_B = -1.0f;
669 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
670 (activationDescriptor, 0.0001f, Compute::CpuAcc);
672 TEST_CASE(
"FuseBoundedReLUIntoDWConvFloat32CpuAccTest")
675 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
676 activationDescriptor.
m_A = 1.0f;
677 activationDescriptor.
m_B = -1.0f;
679 FuseActivationIntoPreviousLayerTest < DWConvolution2dTest < DataType::Float32 > , DataType::Float32 >
680 (activationDescriptor, 0.0001f, Compute::CpuAcc);
682 TEST_CASE(
"FuseBoundedReLUIntoFullyConnectedFloat32CpuAccTest")
685 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
686 activationDescriptor.
m_A = 1.0f;
687 activationDescriptor.
m_B = -1.0f;
689 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
690 (activationDescriptor, 0.0001f, Compute::CpuAcc);
692 TEST_CASE(
"FuseBoundedReLUIntoBatchNormFloat32CpuAccTest")
695 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
696 activationDescriptor.
m_A = 1.0f;
697 activationDescriptor.
m_B = -1.0f;
699 FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
700 (activationDescriptor, 0.0001f, Compute::CpuAcc);
704 TEST_CASE(
"FuseReLUIntoConvQAsymmU8CpuAccTest")
707 activationDescriptor.
m_Function = ActivationFunction::ReLu;
709 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
710 (activationDescriptor, 0.0001f, Compute::CpuAcc);
712 TEST_CASE(
"FuseReLUIntoDWConvQAsymmU8CpuAccTest")
715 activationDescriptor.
m_Function = ActivationFunction::ReLu;
717 FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
718 (activationDescriptor, 0.0001f, Compute::CpuAcc);
720 TEST_CASE(
"FuseReLUIntoFullyConnectedQAsymmU8CpuAccTest")
723 activationDescriptor.
m_Function = ActivationFunction::ReLu;
725 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
726 (activationDescriptor, 0.0001f, Compute::CpuAcc);
730 TEST_CASE(
"FuseBoundedReLUIntoConvQASymmS8CpuAccTest")
733 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
734 activationDescriptor.
m_A = 6.0f;
735 activationDescriptor.
m_B = 0.0f;
737 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
738 (activationDescriptor, 0.0001f, Compute::CpuAcc);
740 TEST_CASE(
"FuseBoundedReLUIntoDWConvQASymmS8CpuAccTest")
743 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
744 activationDescriptor.
m_A = 6.0f;
745 activationDescriptor.
m_B = 0.0f;
747 FuseActivationIntoPreviousLayerTest < DWConvolution2dTest < DataType::QAsymmS8 > , DataType::QAsymmS8 >
748 (activationDescriptor, 0.0001f, Compute::CpuAcc);
750 TEST_CASE(
"FuseBoundedReLUIntoFullyConnectedQASymmS8CpuAccTest")
753 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
754 activationDescriptor.
m_A = 6.0f;
755 activationDescriptor.
m_B = 0.0f;
757 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmS8>, DataType::QAsymmS8>
758 (activationDescriptor, 0.0001f, Compute::CpuAcc);
762 TEST_CASE(
"FuseTanHIntoConvFloat32CpuAccTest")
765 activationDescriptor.
m_Function = ActivationFunction::TanH;
767 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
768 (activationDescriptor, 0.0001f, Compute::CpuAcc);
772 TEST_CASE(
"FuseHardSwishIntoConvFloat32CpuAccTest")
775 activationDescriptor.
m_Function = ActivationFunction::HardSwish;
777 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
778 (activationDescriptor, 0.0001f, Compute::CpuAcc);
782 TEST_CASE(
"LayerFollowedByActivationFloat32CpuAccTest")
785 for (
int i = 0; i != 12; ++i)
788 activationDescriptor.
m_A = 1.0f;
789 activationDescriptor.
m_B = -1.0f;
790 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
791 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " << i);
792 CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
793 (activationDescriptor, Compute::CpuAcc)),
"DepthwiseConvolution + Activation function " << i);
794 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
795 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " << i);
796 CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float32>, DataType::Float32>
797 (activationDescriptor, Compute::CpuAcc)),
"BatchNorm + Activation function " << i);
800 TEST_CASE(
"LayerFollowedByActivationFloat16CpuAccTest")
803 for (
int i = 0; i != 12; ++i)
806 activationDescriptor.
m_A = 1.0f;
807 activationDescriptor.
m_B = -1.0f;
808 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float16>, DataType::Float16>
809 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " << i);
810 CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float16>, DataType::Float16>
811 (activationDescriptor, Compute::CpuAcc)),
"DepthwiseConvolution + Activation function " << i);
812 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float16>, DataType::Float16>
813 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " << i);
814 CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float16>, DataType::Float16>
815 (activationDescriptor, Compute::CpuAcc)),
"BatchNorm + Activation function " << i);
818 TEST_CASE(
"LayerFollowedByActivationQAsymmU8CpuAccTest")
822 activationDescriptor.
m_Function = ActivationFunction::Sigmoid;
823 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
824 (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)),
"Convolution + Activation function " <<
825 static_cast<int>(activationDescriptor.
m_Function));
826 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
827 (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)),
"FullyConnected + Activation function " <<
828 static_cast<int>(activationDescriptor.
m_Function));
830 activationDescriptor.
m_Function = ActivationFunction::TanH;
831 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
832 (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)),
"Convolution + Activation function " <<
833 static_cast<int>(activationDescriptor.
m_Function));
834 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
835 (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)),
"FullyConnected + Activation function " <<
836 static_cast<int>(activationDescriptor.
m_Function));
838 activationDescriptor.
m_Function = ActivationFunction::ReLu;
839 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
840 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " <<
841 static_cast<int>(activationDescriptor.
m_Function));
842 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
843 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " <<
844 static_cast<int>(activationDescriptor.
m_Function));
846 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
847 activationDescriptor.
m_A = 1.0f;
848 activationDescriptor.
m_B = -1.0f;
849 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
850 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " <<
851 static_cast<int>(activationDescriptor.
m_Function));
852 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
853 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " <<
854 static_cast<int>(activationDescriptor.
m_Function));
856 activationDescriptor.
m_Function = ActivationFunction::HardSwish;
857 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
858 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " <<
859 static_cast<int>(activationDescriptor.
m_Function));
860 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
861 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " <<
862 static_cast<int>(activationDescriptor.
m_Function));
// Reference — ActivationDescriptor members (hover-doc residue from the source browser,
// preserved here as a comment so the file stays well-formed):
//   ActivationDescriptor — describes an ActivationLayer.
//   float m_A — alpha / upper bound used by the activation functions (BoundedReLu, Linear, TanH, Elu).
//   float m_B — beta / lower bound used by the activation functions (BoundedReLu, Linear, TanH).
//   ActivationFunction m_Function — the activation function to use (Sigmoid, TanH, Linear, ReLu,
//       BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square, Elu).