635 TEST_CASE(
"FuseReLUIntoConvFloat32CpuAccTest")
638 activationDescriptor.
m_Function = ActivationFunction::ReLu;
640 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
641 (activationDescriptor, 0.0001f, Compute::CpuAcc);
643 TEST_CASE(
"FuseReLUIntoDWConvFloat32CpuAccTest")
646 activationDescriptor.
m_Function = ActivationFunction::ReLu;
648 FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
649 (activationDescriptor, 0.0001f, Compute::CpuAcc);
651 TEST_CASE(
"FuseReLUIntoFullyConnectedFloat32CpuAccTest")
654 activationDescriptor.
m_Function = ActivationFunction::ReLu;
656 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
657 (activationDescriptor, 0.0001f, Compute::CpuAcc);
659 TEST_CASE(
"FuseReLUIntoBatchNormFloat32CpuAccTest")
662 activationDescriptor.
m_Function = ActivationFunction::ReLu;
664 FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
665 (activationDescriptor, 0.0001f, Compute::CpuAcc);
669 TEST_CASE(
"FuseBoundedReLUIntoConvFloat32CpuAccTest")
672 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
673 activationDescriptor.
m_A = 1.0f;
674 activationDescriptor.
m_B = -1.0f;
676 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
677 (activationDescriptor, 0.0001f, Compute::CpuAcc);
679 TEST_CASE(
"FuseBoundedReLUIntoDWConvFloat32CpuAccTest")
682 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
683 activationDescriptor.
m_A = 1.0f;
684 activationDescriptor.
m_B = -1.0f;
686 FuseActivationIntoPreviousLayerTest < DWConvolution2dTest < DataType::Float32 > , DataType::Float32 >
687 (activationDescriptor, 0.0001f, Compute::CpuAcc);
689 TEST_CASE(
"FuseBoundedReLUIntoFullyConnectedFloat32CpuAccTest")
692 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
693 activationDescriptor.
m_A = 1.0f;
694 activationDescriptor.
m_B = -1.0f;
696 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
697 (activationDescriptor, 0.0001f, Compute::CpuAcc);
699 TEST_CASE(
"FuseBoundedReLUIntoBatchNormFloat32CpuAccTest")
702 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
703 activationDescriptor.
m_A = 1.0f;
704 activationDescriptor.
m_B = -1.0f;
706 FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
707 (activationDescriptor, 0.0001f, Compute::CpuAcc);
711 TEST_CASE(
"FuseReLUIntoConvQAsymmU8CpuAccTest")
714 activationDescriptor.
m_Function = ActivationFunction::ReLu;
716 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
717 (activationDescriptor, 0.0001f, Compute::CpuAcc);
719 TEST_CASE(
"FuseReLUIntoDWConvQAsymmU8CpuAccTest")
722 activationDescriptor.
m_Function = ActivationFunction::ReLu;
724 FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
725 (activationDescriptor, 0.0001f, Compute::CpuAcc);
727 TEST_CASE(
"FuseReLUIntoFullyConnectedQAsymmU8CpuAccTest")
730 activationDescriptor.
m_Function = ActivationFunction::ReLu;
732 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
733 (activationDescriptor, 0.0001f, Compute::CpuAcc);
737 TEST_CASE(
"FuseBoundedReLUIntoConvQASymmS8CpuAccTest")
740 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
741 activationDescriptor.
m_A = 6.0f;
742 activationDescriptor.
m_B = 0.0f;
744 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
745 (activationDescriptor, 0.0001f, Compute::CpuAcc);
747 TEST_CASE(
"FuseBoundedReLUIntoDWConvQASymmS8CpuAccTest")
750 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
751 activationDescriptor.
m_A = 6.0f;
752 activationDescriptor.
m_B = 0.0f;
754 FuseActivationIntoPreviousLayerTest < DWConvolution2dTest < DataType::QAsymmS8 > , DataType::QAsymmS8 >
755 (activationDescriptor, 0.0001f, Compute::CpuAcc);
757 TEST_CASE(
"FuseBoundedReLUIntoFullyConnectedQASymmS8CpuAccTest")
760 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
761 activationDescriptor.
m_A = 6.0f;
762 activationDescriptor.
m_B = 0.0f;
764 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmS8>, DataType::QAsymmS8>
765 (activationDescriptor, 0.0001f, Compute::CpuAcc);
769 TEST_CASE(
"FuseTanHIntoConvFloat32CpuAccTest")
772 activationDescriptor.
m_Function = ActivationFunction::TanH;
774 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
775 (activationDescriptor, 0.0001f, Compute::CpuAcc);
779 TEST_CASE(
"FuseHardSwishIntoConvFloat32CpuAccTest")
782 activationDescriptor.
m_Function = ActivationFunction::HardSwish;
784 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
785 (activationDescriptor, 0.0001f, Compute::CpuAcc);
789 TEST_CASE(
"LayerFollowedByActivationFloat32CpuAccTest")
792 for (
int i = 0; i != 12; ++i)
795 activationDescriptor.
m_A = 1.0f;
796 activationDescriptor.
m_B = -1.0f;
797 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
798 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " << i);
799 CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
800 (activationDescriptor, Compute::CpuAcc)),
"DepthwiseConvolution + Activation function " << i);
801 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
802 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " << i);
803 CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float32>, DataType::Float32>
804 (activationDescriptor, Compute::CpuAcc)),
"BatchNorm + Activation function " << i);
807 TEST_CASE(
"LayerFollowedByActivationFloat16CpuAccTest")
810 for (
int i = 0; i != 12; ++i)
813 activationDescriptor.
m_A = 1.0f;
814 activationDescriptor.
m_B = -1.0f;
815 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float16>, DataType::Float16>
816 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " << i);
817 CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float16>, DataType::Float16>
818 (activationDescriptor, Compute::CpuAcc)),
"DepthwiseConvolution + Activation function " << i);
819 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float16>, DataType::Float16>
820 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " << i);
821 CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float16>, DataType::Float16>
822 (activationDescriptor, Compute::CpuAcc)),
"BatchNorm + Activation function " << i);
825 TEST_CASE(
"LayerFollowedByActivationQAsymmU8CpuAccTest")
829 activationDescriptor.
m_Function = ActivationFunction::Sigmoid;
830 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
831 (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)),
"Convolution + Activation function " <<
832 static_cast<int>(activationDescriptor.
m_Function));
833 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
834 (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)),
"FullyConnected + Activation function " <<
835 static_cast<int>(activationDescriptor.
m_Function));
837 activationDescriptor.
m_Function = ActivationFunction::TanH;
838 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
839 (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)),
"Convolution + Activation function " <<
840 static_cast<int>(activationDescriptor.
m_Function));
841 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
842 (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)),
"FullyConnected + Activation function " <<
843 static_cast<int>(activationDescriptor.
m_Function));
845 activationDescriptor.
m_Function = ActivationFunction::ReLu;
846 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
847 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " <<
848 static_cast<int>(activationDescriptor.
m_Function));
849 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
850 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " <<
851 static_cast<int>(activationDescriptor.
m_Function));
853 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
854 activationDescriptor.
m_A = 1.0f;
855 activationDescriptor.
m_B = -1.0f;
856 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
857 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " <<
858 static_cast<int>(activationDescriptor.
m_Function));
859 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
860 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " <<
861 static_cast<int>(activationDescriptor.
m_Function));
863 activationDescriptor.
m_Function = ActivationFunction::HardSwish;
864 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
865 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " <<
866 static_cast<int>(activationDescriptor.
m_Function));
867 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
868 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " <<
869 static_cast<int>(activationDescriptor.
m_Function));
// Reference — ActivationDescriptor fields used by the tests above:
//   m_Function : ActivationFunction — the activation function to use (Sigmoid, TanH,
//                Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square, Elu).
//   m_A        : float — alpha / upper-bound value used by the activation functions
//                (BoundedReLu, Linear, TanH, Elu).
//   m_B        : float — beta / lower-bound value used by the activation functions
//                (BoundedReLu, Linear, TanH).