619 TEST_CASE(
"FuseReLUIntoConvFloat32CpuAccTest")
622 activationDescriptor.
m_Function = ActivationFunction::ReLu;
624 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
625 (activationDescriptor, 0.0001f, Compute::CpuAcc);
627 TEST_CASE(
"FuseReLUIntoDWConvFloat32CpuAccTest")
630 activationDescriptor.
m_Function = ActivationFunction::ReLu;
632 FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
633 (activationDescriptor, 0.0001f, Compute::CpuAcc);
635 TEST_CASE(
"FuseReLUIntoFullyConnectedFloat32CpuAccTest")
638 activationDescriptor.
m_Function = ActivationFunction::ReLu;
640 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
641 (activationDescriptor, 0.0001f, Compute::CpuAcc);
643 TEST_CASE(
"FuseReLUIntoBatchNormFloat32CpuAccTest")
646 activationDescriptor.
m_Function = ActivationFunction::ReLu;
648 FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
649 (activationDescriptor, 0.0001f, Compute::CpuAcc);
653 TEST_CASE(
"FuseBoundedReLUIntoConvFloat32CpuAccTest")
656 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
657 activationDescriptor.
m_A = 1.0f;
658 activationDescriptor.
m_B = -1.0f;
660 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
661 (activationDescriptor, 0.0001f, Compute::CpuAcc);
663 TEST_CASE(
"FuseBoundedReLUIntoDWConvFloat32CpuAccTest")
666 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
667 activationDescriptor.
m_A = 1.0f;
668 activationDescriptor.
m_B = -1.0f;
670 FuseActivationIntoPreviousLayerTest < DWConvolution2dTest < DataType::Float32 > , DataType::Float32 >
671 (activationDescriptor, 0.0001f, Compute::CpuAcc);
673 TEST_CASE(
"FuseBoundedReLUIntoFullyConnectedFloat32CpuAccTest")
676 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
677 activationDescriptor.
m_A = 1.0f;
678 activationDescriptor.
m_B = -1.0f;
680 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
681 (activationDescriptor, 0.0001f, Compute::CpuAcc);
683 TEST_CASE(
"FuseBoundedReLUIntoBatchNormFloat32CpuAccTest")
686 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
687 activationDescriptor.
m_A = 1.0f;
688 activationDescriptor.
m_B = -1.0f;
690 FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
691 (activationDescriptor, 0.0001f, Compute::CpuAcc);
695 TEST_CASE(
"FuseReLUIntoConvQAsymmU8CpuAccTest")
698 activationDescriptor.
m_Function = ActivationFunction::ReLu;
700 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
701 (activationDescriptor, 0.0001f, Compute::CpuAcc);
703 TEST_CASE(
"FuseReLUIntoDWConvQAsymmU8CpuAccTest")
706 activationDescriptor.
m_Function = ActivationFunction::ReLu;
708 FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
709 (activationDescriptor, 0.0001f, Compute::CpuAcc);
711 TEST_CASE(
"FuseReLUIntoFullyConnectedQAsymmU8CpuAccTest")
714 activationDescriptor.
m_Function = ActivationFunction::ReLu;
716 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
717 (activationDescriptor, 0.0001f, Compute::CpuAcc);
721 TEST_CASE(
"FuseBoundedReLUIntoConvQASymmS8CpuAccTest")
724 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
725 activationDescriptor.
m_A = 6.0f;
726 activationDescriptor.
m_B = 0.0f;
728 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
729 (activationDescriptor, 0.0001f, Compute::CpuAcc);
731 TEST_CASE(
"FuseBoundedReLUIntoDWConvQASymmS8CpuAccTest")
734 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
735 activationDescriptor.
m_A = 6.0f;
736 activationDescriptor.
m_B = 0.0f;
738 FuseActivationIntoPreviousLayerTest < DWConvolution2dTest < DataType::QAsymmS8 > , DataType::QAsymmS8 >
739 (activationDescriptor, 0.0001f, Compute::CpuAcc);
741 TEST_CASE(
"FuseBoundedReLUIntoFullyConnectedQASymmS8CpuAccTest")
744 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
745 activationDescriptor.
m_A = 6.0f;
746 activationDescriptor.
m_B = 0.0f;
748 FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmS8>, DataType::QAsymmS8>
749 (activationDescriptor, 0.0001f, Compute::CpuAcc);
753 TEST_CASE(
"FuseTanHIntoConvFloat32CpuAccTest")
756 activationDescriptor.
m_Function = ActivationFunction::TanH;
758 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
759 (activationDescriptor, 0.0001f, Compute::CpuAcc);
763 TEST_CASE(
"FuseHardSwishIntoConvFloat32CpuAccTest")
766 activationDescriptor.
m_Function = ActivationFunction::HardSwish;
768 FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
769 (activationDescriptor, 0.0001f, Compute::CpuAcc);
773 TEST_CASE(
"LayerFollowedByActivationFloat32CpuAccTest")
776 for (
int i = 0; i != 12; ++i)
779 activationDescriptor.
m_A = 1.0f;
780 activationDescriptor.
m_B = -1.0f;
781 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
782 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " << i);
783 CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
784 (activationDescriptor, Compute::CpuAcc)),
"DepthwiseConvolution + Activation function " << i);
785 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
786 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " << i);
787 CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float32>, DataType::Float32>
788 (activationDescriptor, Compute::CpuAcc)),
"BatchNorm + Activation function " << i);
791 TEST_CASE(
"LayerFollowedByActivationFloat16CpuAccTest")
794 for (
int i = 0; i != 12; ++i)
797 activationDescriptor.
m_A = 1.0f;
798 activationDescriptor.
m_B = -1.0f;
799 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float16>, DataType::Float16>
800 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " << i);
801 CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float16>, DataType::Float16>
802 (activationDescriptor, Compute::CpuAcc)),
"DepthwiseConvolution + Activation function " << i);
803 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float16>, DataType::Float16>
804 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " << i);
805 CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float16>, DataType::Float16>
806 (activationDescriptor, Compute::CpuAcc)),
"BatchNorm + Activation function " << i);
809 TEST_CASE(
"LayerFollowedByActivationQAsymmU8CpuAccTest")
813 activationDescriptor.
m_Function = ActivationFunction::Sigmoid;
814 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
815 (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)),
"Convolution + Activation function " <<
816 static_cast<int>(activationDescriptor.
m_Function));
817 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
818 (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)),
"FullyConnected + Activation function " <<
819 static_cast<int>(activationDescriptor.
m_Function));
821 activationDescriptor.
m_Function = ActivationFunction::TanH;
822 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
823 (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)),
"Convolution + Activation function " <<
824 static_cast<int>(activationDescriptor.
m_Function));
825 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
826 (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)),
"FullyConnected + Activation function " <<
827 static_cast<int>(activationDescriptor.
m_Function));
829 activationDescriptor.
m_Function = ActivationFunction::ReLu;
830 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
831 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " <<
832 static_cast<int>(activationDescriptor.
m_Function));
833 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
834 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " <<
835 static_cast<int>(activationDescriptor.
m_Function));
837 activationDescriptor.
m_Function = ActivationFunction::BoundedReLu;
838 activationDescriptor.
m_A = 1.0f;
839 activationDescriptor.
m_B = -1.0f;
840 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
841 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " <<
842 static_cast<int>(activationDescriptor.
m_Function));
843 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
844 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " <<
845 static_cast<int>(activationDescriptor.
m_Function));
847 activationDescriptor.
m_Function = ActivationFunction::HardSwish;
848 CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
849 (activationDescriptor, Compute::CpuAcc)),
"Convolution + Activation function " <<
850 static_cast<int>(activationDescriptor.
m_Function));
851 CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
852 (activationDescriptor, Compute::CpuAcc)),
"FullyConnected + Activation function " <<
853 static_cast<int>(activationDescriptor.
m_Function));
// Reference — ActivationDescriptor (the descriptor type used by these tests):
//   float m_A                     : alpha / upper-bound value used by the activation
//                                   functions (BoundedReLu, Linear, TanH, Elu).
//   float m_B                     : beta / lower-bound value used by the activation
//                                   functions (BoundedReLu, Linear, TanH).
//   ActivationFunction m_Function : the activation function to use (Sigmoid, TanH,
//                                   Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu,
//                                   Abs, Sqrt, Square, Elu).