ArmNN 21.02
FuseActivationTests.cpp File Reference
#include "LayersFwd.hpp"
#include <Network.hpp>
#include <ResolveType.hpp>
#include <armnn/INetwork.hpp>
#include <test/TestUtils.hpp>
#include <boost/test/unit_test.hpp>
#include <QuantizeHelper.hpp>
#include <string>

Go to the source code of this file.

Namespaces

 armnn
 Copyright (c) 2021 ARM Limited and Contributors.
 

Functions

template<typename T >
std::vector< T > GetVector (unsigned int size, float initial, float increment)
 
template<typename LayerTest , DataType ArmnnType>
INetworkPtr CreatNetwork (ActivationDescriptor activationDescriptor, bool preventFusing, float scale, int32_t offset)
 
template<typename LayerTest , DataType ArmnnType, typename LayerType = typename LayerTest::LayerType, typename T = ResolveType<ArmnnType>>
void FuseActivationIntoPreviousLayerTest (ActivationDescriptor activationDescriptor, float tolerance, Compute backendId, float scale=1.f, int32_t offset=0)
 
template<typename LayerTest , DataType ArmnnType, typename LayerType = typename LayerTest::LayerType, typename T = ResolveType<ArmnnType>>
bool FuseActivationSimpleTest (ActivationDescriptor activationDescriptor, Compute backendId, float scale=1.f, int32_t offset=0)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoDWConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoFullyConnectedFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoBatchNormFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoDWConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoFullyConnectedFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoBatchNormFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoConvQAsymmU8CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoDWConvQAsymmU8CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoFullyConnectedQAsymmU8CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoConvQASymmS8CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoDWConvQASymmS8CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoFullyConnectedQASymmS8CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (LayerFollowedByActivationFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (LayerFollowedByActivationFloat16CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (LayerFollowedByActivationQAsymmU8CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoDWConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoFullyConnectedFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoBatchNormFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoMulFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoAddFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoSubFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoDivFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoDWConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoFullyConnectedFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoBatchNormFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoMulFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoAddFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoSubFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoDivFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoConvFloat16GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoDWConvFloat16GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoFullyConnectedFloat16GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoBatchNormFloat16GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoMulFloat16GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoAddFloat16GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoSubFloat16GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoDivFloat16GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUQIntoConvAsymmU8GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUQIntoDWConvAsymmU8GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUQIntoFullyConnectedAsymmU8GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoConvQASymmS8GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoDWConvQASymmS8GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoFullyConnectedQASymmS8GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoMulFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoAddFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoSubFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoDivFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoMulFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoAddFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoSubFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoDivFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (LayerFollowedByActivationFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (LayerFollowedByActivationFloat16GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (LayerFollowedByActivationQAsymmU8GpuAccTest)
 

Function Documentation
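
All of the test cases documented in this section follow the same pattern: fill in an ActivationDescriptor, then instantiate one of the helper templates declared above for a base-layer test type, a DataType, and a Compute backend. The sketch below condenses that pattern; ExampleFusionCheck is a hypothetical wrapper (not part of FuseActivationTests.cpp) and assumes it sits in this file next to the Convolution2dTest helper and the two fusion-test templates whose calls it mirrors.

    // Illustrative sketch only: ExampleFusionCheck is hypothetical and relies on the
    // helper templates defined in FuseActivationTests.cpp.
    bool ExampleFusionCheck()
    {
        using namespace armnn;

        // 1. Describe the activation that should be fused into the preceding layer.
        ActivationDescriptor activationDescriptor;
        activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
        activationDescriptor.m_A = 6.0f; // upper bound (alpha) used by BoundedReLu
        activationDescriptor.m_B = 0.0f; // lower bound (beta) used by BoundedReLu

        // 2. Exercise fusion for a given base layer, data type and backend; the float
        //    argument is the comparison tolerance.
        FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
            (activationDescriptor, 0.0001f, Compute::CpuAcc);

        // 3. FuseActivationSimpleTest returns a bool (and optionally takes a quantization
        //    scale and offset); the LayerFollowedByActivation* cases wrap it in
        //    BOOST_CHECK_MESSAGE.
        return FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
            (activationDescriptor, Compute::CpuAcc);
    }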

◆ BOOST_AUTO_TEST_CASE() [1/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoConvFloat32CpuAccTest  )

Definition at line 479 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_Function, and armnn::ReLu.

480 {
481  ActivationDescriptor activationDescriptor;
482  activationDescriptor.m_Function = ActivationFunction::ReLu;
483 
484  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
485  (activationDescriptor, 0.0001f, Compute::CpuAcc);
486 }

◆ BOOST_AUTO_TEST_CASE() [2/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoDWConvFloat32CpuAccTest  )

Definition at line 487 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_Function, and armnn::ReLu.

488 {
489  ActivationDescriptor activationDescriptor;
490  activationDescriptor.m_Function = ActivationFunction::ReLu;
491 
492  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
493  (activationDescriptor, 0.0001f, Compute::CpuAcc);
494 }

◆ BOOST_AUTO_TEST_CASE() [3/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoFullyConnectedFloat32CpuAccTest  )

Definition at line 495 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_Function, and armnn::ReLu.

496 {
497  ActivationDescriptor activationDescriptor;
498  activationDescriptor.m_Function = ActivationFunction::ReLu;
499 
500  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
501  (activationDescriptor, 0.0001f, Compute::CpuAcc);
502 }

◆ BOOST_AUTO_TEST_CASE() [4/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoBatchNormFloat32CpuAccTest  )

Definition at line 503 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_Function, and armnn::ReLu.

504 {
505  ActivationDescriptor activationDescriptor;
506  activationDescriptor.m_Function = ActivationFunction::ReLu;
507 
508  FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
509  (activationDescriptor, 0.0001f, Compute::CpuAcc);
510 }

◆ BOOST_AUTO_TEST_CASE() [5/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoConvFloat32CpuAccTest  )

Definition at line 513 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

514 {
515  ActivationDescriptor activationDescriptor;
516  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
517  activationDescriptor.m_A = 1.0f;
518  activationDescriptor.m_B = -1.0f;
519 
520  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
521  (activationDescriptor, 0.0001f, Compute::CpuAcc);
522 }

◆ BOOST_AUTO_TEST_CASE() [6/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoDWConvFloat32CpuAccTest  )

Definition at line 523 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

524 {
525  ActivationDescriptor activationDescriptor;
526  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
527  activationDescriptor.m_A = 1.0f;
528  activationDescriptor.m_B = -1.0f;
529 
530  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
531  (activationDescriptor, 0.0001f, Compute::CpuAcc);
532 }

◆ BOOST_AUTO_TEST_CASE() [7/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoFullyConnectedFloat32CpuAccTest  )

Definition at line 533 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

534 {
535  ActivationDescriptor activationDescriptor;
536  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
537  activationDescriptor.m_A = 1.0f;
538  activationDescriptor.m_B = -1.0f;
539 
540  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
541  (activationDescriptor, 0.0001f, Compute::CpuAcc);
542 }

◆ BOOST_AUTO_TEST_CASE() [8/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoBatchNormFloat32CpuAccTest  )

Definition at line 543 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

544 {
545  ActivationDescriptor activationDescriptor;
546  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
547  activationDescriptor.m_A = 1.0f;
548  activationDescriptor.m_B = -1.0f;
549 
550  FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
551  (activationDescriptor, 0.0001f, Compute::CpuAcc);
552 }

◆ BOOST_AUTO_TEST_CASE() [9/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoConvQAsymmU8CpuAccTest  )

Definition at line 555 of file FuseActivationTests.cpp.

References armnn::CpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

556 {
557  ActivationDescriptor activationDescriptor;
558  activationDescriptor.m_Function = ActivationFunction::ReLu;
559 
560  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
561  (activationDescriptor, 0.0001f, Compute::CpuAcc);
562 }

◆ BOOST_AUTO_TEST_CASE() [10/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoDWConvQAsymmU8CpuAccTest  )

Definition at line 563 of file FuseActivationTests.cpp.

References armnn::CpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

564 {
565  ActivationDescriptor activationDescriptor;
566  activationDescriptor.m_Function = ActivationFunction::ReLu;
567 
568  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
569  (activationDescriptor, 0.0001f, Compute::CpuAcc);
570 }

◆ BOOST_AUTO_TEST_CASE() [11/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoFullyConnectedQAsymmU8CpuAccTest  )

Definition at line 571 of file FuseActivationTests.cpp.

References armnn::CpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

572 {
573  ActivationDescriptor activationDescriptor;
574  activationDescriptor.m_Function = ActivationFunction::ReLu;
575 
576  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
577  (activationDescriptor, 0.0001f, Compute::CpuAcc);
578 }

◆ BOOST_AUTO_TEST_CASE() [12/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoConvQASymmS8CpuAccTest  )

Definition at line 581 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, ActivationDescriptor::m_Function, and armnn::QAsymmS8.

582 {
583  ActivationDescriptor activationDescriptor;
584  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
585  activationDescriptor.m_A = 6.0f;
586  activationDescriptor.m_B = 0.0f;
587 
588  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
589  (activationDescriptor, 0.0001f, Compute::CpuAcc);
590 }

◆ BOOST_AUTO_TEST_CASE() [13/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoDWConvQASymmS8CpuAccTest  )

Definition at line 591 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, ActivationDescriptor::m_Function, and armnn::QAsymmS8.

592 {
593  ActivationDescriptor activationDescriptor;
594  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
595  activationDescriptor.m_A = 6.0f;
596  activationDescriptor.m_B = 0.0f;
597 
598  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
599  (activationDescriptor, 0.0001f, Compute::CpuAcc);
600 }

◆ BOOST_AUTO_TEST_CASE() [14/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoFullyConnectedQASymmS8CpuAccTest  )

Definition at line 601 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, ActivationDescriptor::m_Function, and armnn::QAsymmS8.

602 {
603  ActivationDescriptor activationDescriptor;
604  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
605  activationDescriptor.m_A = 6.0f;
606  activationDescriptor.m_B = 0.0f;
607 
608  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmS8>, DataType::QAsymmS8>
609  (activationDescriptor, 0.0001f, Compute::CpuAcc);
610 }

◆ BOOST_AUTO_TEST_CASE() [15/62]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoConvFloat32CpuAccTest  )

Definition at line 613 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_Function, and armnn::TanH.

614 {
615  ActivationDescriptor activationDescriptor;
616  activationDescriptor.m_Function = ActivationFunction::TanH;
617 
618  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
619  (activationDescriptor, 0.0001f, Compute::CpuAcc);
620 }

◆ BOOST_AUTO_TEST_CASE() [16/62]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoConvFloat32CpuAccTest  )

Definition at line 623 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, armnn::HardSwish, and ActivationDescriptor::m_Function.

624 {
625  ActivationDescriptor activationDescriptor;
626  activationDescriptor.m_Function = ActivationFunction::HardSwish;
627 
628  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
629  (activationDescriptor, 0.0001f, Compute::CpuAcc);
630 }

◆ BOOST_AUTO_TEST_CASE() [17/62]

BOOST_AUTO_TEST_CASE ( LayerFollowedByActivationFloat32CpuAccTest  )

Definition at line 633 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, armnn::FuseActivationSimpleTest(), ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

634 {
635  ActivationDescriptor activationDescriptor;
636  for (int i = 0; i != 12; ++i)
637  {
638  activationDescriptor.m_Function = static_cast<ActivationFunction>(i);
639  activationDescriptor.m_A = 1.0f;
640  activationDescriptor.m_B = -1.0f;
641  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
642  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " << i);
643  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
644  (activationDescriptor, Compute::CpuAcc)), "DepthwiseConvolution + Activation function " << i);
645  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
646  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " << i);
647  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float32>, DataType::Float32>
648  (activationDescriptor, Compute::CpuAcc)), "BatchNorm + Activation function " << i);
649  }
650 }

◆ BOOST_AUTO_TEST_CASE() [18/62]

BOOST_AUTO_TEST_CASE ( LayerFollowedByActivationFloat16CpuAccTest  )

Definition at line 651 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float16, armnn::FuseActivationSimpleTest(), ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

652 {
653  ActivationDescriptor activationDescriptor;
654  for (int i = 0; i != 12; ++i)
655  {
656  activationDescriptor.m_Function = static_cast<ActivationFunction>(i);
657  activationDescriptor.m_A = 1.0f;
658  activationDescriptor.m_B = -1.0f;
659  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float16>, DataType::Float16>
660  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " << i);
661  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float16>, DataType::Float16>
662  (activationDescriptor, Compute::CpuAcc)), "DepthwiseConvolution + Activation function " << i);
663  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float16>, DataType::Float16>
664  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " << i);
665  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float16>, DataType::Float16>
666  (activationDescriptor, Compute::CpuAcc)), "BatchNorm + Activation function " << i);
667  }
668 }

◆ BOOST_AUTO_TEST_CASE() [19/62]

BOOST_AUTO_TEST_CASE ( LayerFollowedByActivationQAsymmU8CpuAccTest  )

Definition at line 669 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::FuseActivationSimpleTest(), armnn::HardSwish, ActivationDescriptor::m_A, ActivationDescriptor::m_B, ActivationDescriptor::m_Function, armnn::QAsymmU8, armnn::ReLu, armnn::Sigmoid, and armnn::TanH.

670 {
671  ActivationDescriptor activationDescriptor;
672 
673  activationDescriptor.m_Function = ActivationFunction::Sigmoid;
674  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
675  (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)), "Convolution + Activation function " <<
676  static_cast<int>(activationDescriptor.m_Function));
677  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
678  (activationDescriptor, Compute::CpuAcc, 1.f / 256.f, 0)), "FullyConnected + Activation function " <<
679  static_cast<int>(activationDescriptor.m_Function));
680 
681  activationDescriptor.m_Function = ActivationFunction::TanH;
682  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
683  (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)), "Convolution + Activation function " <<
684  static_cast<int>(activationDescriptor.m_Function));
685  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
686  (activationDescriptor, Compute::CpuAcc, 1.f / 128.f, 128)), "FullyConnected + Activation function " <<
687  static_cast<int>(activationDescriptor.m_Function));
688 
689  activationDescriptor.m_Function = ActivationFunction::ReLu;
690  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
691  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " <<
692  static_cast<int>(activationDescriptor.m_Function));
693  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
694  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " <<
695  static_cast<int>(activationDescriptor.m_Function));
696 
697  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
698  activationDescriptor.m_A = 1.0f;
699  activationDescriptor.m_B = -1.0f;
700  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
701  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " <<
702  static_cast<int>(activationDescriptor.m_Function));
703  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
704  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " <<
705  static_cast<int>(activationDescriptor.m_Function));
706 
707  activationDescriptor.m_Function = ActivationFunction::HardSwish;
708  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
709  (activationDescriptor, Compute::CpuAcc)), "Convolution + Activation function " <<
710  static_cast<int>(activationDescriptor.m_Function));
711  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
712  (activationDescriptor, Compute::CpuAcc)), "FullyConnected + Activation function " <<
713  static_cast<int>(activationDescriptor.m_Function));
714 }

◆ BOOST_AUTO_TEST_CASE() [20/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoConvFloat32GpuAccTest  )

Definition at line 719 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

720 {
721  ActivationDescriptor activationDescriptor;
722  activationDescriptor.m_Function = ActivationFunction::ReLu;
723 
724  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
725  (activationDescriptor, 0.0001f, Compute::GpuAcc);
726 }

◆ BOOST_AUTO_TEST_CASE() [21/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoDWConvFloat32GpuAccTest  )

Definition at line 727 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

728 {
729  ActivationDescriptor activationDescriptor;
730  activationDescriptor.m_Function = ActivationFunction::ReLu;
731 
732  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
733  (activationDescriptor, 0.0001f, Compute::GpuAcc);
734 }

◆ BOOST_AUTO_TEST_CASE() [22/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoFullyConnectedFloat32GpuAccTest  )

Definition at line 735 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

736 {
737  ActivationDescriptor activationDescriptor;
738  activationDescriptor.m_Function = ActivationFunction::ReLu;
739 
740  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
741  (activationDescriptor, 0.0001f, Compute::GpuAcc);
742 }

◆ BOOST_AUTO_TEST_CASE() [23/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoBatchNormFloat32GpuAccTest  )

Definition at line 743 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

744 {
745  ActivationDescriptor activationDescriptor;
746  activationDescriptor.m_Function = ActivationFunction::ReLu;
747 
748  FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
749  (activationDescriptor, 0.0001f, Compute::GpuAcc);
750 }

◆ BOOST_AUTO_TEST_CASE() [24/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoMulFloat32GpuAccTest  )

Definition at line 751 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

752 {
753  ActivationDescriptor activationDescriptor;
754  activationDescriptor.m_Function = ActivationFunction::ReLu;
755 
756  FuseActivationIntoPreviousLayerTest<MultiplicationTest<DataType::Float32>, DataType::Float32>
757  (activationDescriptor, 0.0001f, Compute::GpuAcc);
758 }

◆ BOOST_AUTO_TEST_CASE() [25/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoAddFloat32GpuAccTest  )

Definition at line 759 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

760 {
761  ActivationDescriptor activationDescriptor;
762  activationDescriptor.m_Function = ActivationFunction::ReLu;
763 
764  FuseActivationIntoPreviousLayerTest<AdditionTest<DataType::Float32>, DataType::Float32>
765  (activationDescriptor, 0.0001f, Compute::GpuAcc);
766 }

◆ BOOST_AUTO_TEST_CASE() [26/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoSubFloat32GpuAccTest  )

Definition at line 767 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

768 {
769  ActivationDescriptor activationDescriptor;
770  activationDescriptor.m_Function = ActivationFunction::ReLu;
771 
772  FuseActivationIntoPreviousLayerTest<SubtractionTest<DataType::Float32>, DataType::Float32>
773  (activationDescriptor, 0.0001f, Compute::GpuAcc);
774 }

◆ BOOST_AUTO_TEST_CASE() [27/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoDivFloat32GpuAccTest  )

Definition at line 775 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

776 {
777  ActivationDescriptor activationDescriptor;
778  activationDescriptor.m_Function = ActivationFunction::ReLu;
779 
780  FuseActivationIntoPreviousLayerTest<DivisionTest<DataType::Float32>, DataType::Float32>
781  (activationDescriptor, 0.0001f, Compute::GpuAcc);
782 }

◆ BOOST_AUTO_TEST_CASE() [28/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoConvFloat32GpuAccTest  )

Definition at line 785 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

786 {
787  ActivationDescriptor activationDescriptor;
788  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
789  activationDescriptor.m_A = 1.0f;
790  activationDescriptor.m_B = -1.0f;
791 
792  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
793  (activationDescriptor, 0.0001f, Compute::GpuAcc);
794 }

◆ BOOST_AUTO_TEST_CASE() [29/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoDWConvFloat32GpuAccTest  )

Definition at line 795 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

796 {
797  ActivationDescriptor activationDescriptor;
798  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
799  activationDescriptor.m_A = 1.0f;
800  activationDescriptor.m_B = -1.0f;
801 
802  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
803  (activationDescriptor, 0.0001f, Compute::GpuAcc);
804 }

◆ BOOST_AUTO_TEST_CASE() [30/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoFullyConnectedFloat32GpuAccTest  )

Definition at line 805 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

806 {
807  ActivationDescriptor activationDescriptor;
808  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
809  activationDescriptor.m_A = 1.0f;
810  activationDescriptor.m_B = -1.0f;
811 
812  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
813  (activationDescriptor, 0.0001f, Compute::GpuAcc);
814 }

◆ BOOST_AUTO_TEST_CASE() [31/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoBatchNormFloat32GpuAccTest  )

Definition at line 815 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

816 {
817  ActivationDescriptor activationDescriptor;
818  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
819  activationDescriptor.m_A = 1.0f;
820  activationDescriptor.m_B = -1.0f;
821 
822  FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
823  (activationDescriptor, 0.0001f, Compute::GpuAcc);
824 }

◆ BOOST_AUTO_TEST_CASE() [32/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoMulFloat32GpuAccTest  )

Definition at line 825 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

826 {
827  ActivationDescriptor activationDescriptor;
828  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
829  activationDescriptor.m_A = 1.0f;
830  activationDescriptor.m_B = -1.0f;
831 
832  FuseActivationIntoPreviousLayerTest<MultiplicationTest<DataType::Float32>, DataType::Float32>
833  (activationDescriptor, 0.0001f, Compute::GpuAcc);
834 }

◆ BOOST_AUTO_TEST_CASE() [33/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoAddFloat32GpuAccTest  )

Definition at line 835 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

836 {
837  ActivationDescriptor activationDescriptor;
838  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
839  activationDescriptor.m_A = 1.0f;
840  activationDescriptor.m_B = -1.0f;
841 
842  FuseActivationIntoPreviousLayerTest<AdditionTest<DataType::Float32>, DataType::Float32>
843  (activationDescriptor, 0.0001f, Compute::GpuAcc);
844 }

◆ BOOST_AUTO_TEST_CASE() [34/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoSubFloat32GpuAccTest  )

Definition at line 845 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

846 {
847  ActivationDescriptor activationDescriptor;
848  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
849  activationDescriptor.m_A = 1.0f;
850  activationDescriptor.m_B = -1.0f;
851 
852  FuseActivationIntoPreviousLayerTest<SubtractionTest<DataType::Float32>, DataType::Float32>
853  (activationDescriptor, 0.0001f, Compute::GpuAcc);
854 }

◆ BOOST_AUTO_TEST_CASE() [35/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoDivFloat32GpuAccTest  )

Definition at line 855 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

856 {
857  ActivationDescriptor activationDescriptor;
858  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
859  activationDescriptor.m_A = 1.0f;
860  activationDescriptor.m_B = -1.0f;
861 
862  FuseActivationIntoPreviousLayerTest<DivisionTest<DataType::Float32>, DataType::Float32>
863  (activationDescriptor, 0.0001f, Compute::GpuAcc);
864 }

◆ BOOST_AUTO_TEST_CASE() [36/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoConvFloat16GpuAccTest  )

Definition at line 867 of file FuseActivationTests.cpp.

References armnn::Float16, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

868 {
869  ActivationDescriptor activationDescriptor;
870  activationDescriptor.m_Function = ActivationFunction::ReLu;
871 
872  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float16>, DataType::Float16>
873  (activationDescriptor, 0.0001f, Compute::GpuAcc);
874 }

◆ BOOST_AUTO_TEST_CASE() [37/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoDWConvFloat16GpuAccTest  )

Definition at line 875 of file FuseActivationTests.cpp.

References armnn::Float16, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

876 {
877  ActivationDescriptor activationDescriptor;
878  activationDescriptor.m_Function = ActivationFunction::ReLu;
879 
880  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::Float16>, DataType::Float16>
881  (activationDescriptor, 0.0001f, Compute::GpuAcc);
882 }

◆ BOOST_AUTO_TEST_CASE() [38/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoFullyConnectedFloat16GpuAccTest  )

Definition at line 883 of file FuseActivationTests.cpp.

References armnn::Float16, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

884 {
885  ActivationDescriptor activationDescriptor;
886  activationDescriptor.m_Function = ActivationFunction::ReLu;
887 
888  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float16>, DataType::Float16>
889  (activationDescriptor, 0.0001f, Compute::GpuAcc);
890 }

◆ BOOST_AUTO_TEST_CASE() [39/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoBatchNormFloat16GpuAccTest  )

Definition at line 891 of file FuseActivationTests.cpp.

References armnn::Float16, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

892 {
893  ActivationDescriptor activationDescriptor;
894  activationDescriptor.m_Function = ActivationFunction::ReLu;
895 
896  FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float16>, DataType::Float16>
897  (activationDescriptor, 0.0001f, Compute::GpuAcc);
898 }

◆ BOOST_AUTO_TEST_CASE() [40/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoMulFloat16GpuAccTest  )

Definition at line 899 of file FuseActivationTests.cpp.

References armnn::Float16, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

900 {
901  ActivationDescriptor activationDescriptor;
902  activationDescriptor.m_Function = ActivationFunction::ReLu;
903 
904  FuseActivationIntoPreviousLayerTest<MultiplicationTest<DataType::Float16>, DataType::Float16>
905  (activationDescriptor, 0.0001f, Compute::GpuAcc);
906 }

◆ BOOST_AUTO_TEST_CASE() [41/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoAddFloat16GpuAccTest  )

Definition at line 907 of file FuseActivationTests.cpp.

References armnn::Float16, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

908 {
909  ActivationDescriptor activationDescriptor;
910  activationDescriptor.m_Function = ActivationFunction::ReLu;
911 
912  FuseActivationIntoPreviousLayerTest<AdditionTest<DataType::Float16>, DataType::Float16>
913  (activationDescriptor, 0.0001f, Compute::GpuAcc);
914 }

◆ BOOST_AUTO_TEST_CASE() [42/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoSubFloat16GpuAccTest  )

Definition at line 915 of file FuseActivationTests.cpp.

References armnn::Float16, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

916 {
917  ActivationDescriptor activationDescriptor;
918  activationDescriptor.m_Function = ActivationFunction::ReLu;
919 
920  FuseActivationIntoPreviousLayerTest<SubtractionTest<DataType::Float16>, DataType::Float16>
921  (activationDescriptor, 0.0001f, Compute::GpuAcc);
922 }

◆ BOOST_AUTO_TEST_CASE() [43/62]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoDivFloat16GpuAccTest  )

Definition at line 923 of file FuseActivationTests.cpp.

References armnn::Float16, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

924 {
925  ActivationDescriptor activationDescriptor;
926  activationDescriptor.m_Function = ActivationFunction::ReLu;
927 
928  FuseActivationIntoPreviousLayerTest<DivisionTest<DataType::Float16>, DataType::Float16>
929  (activationDescriptor, 0.0001f, Compute::GpuAcc);
930 }

◆ BOOST_AUTO_TEST_CASE() [44/62]

BOOST_AUTO_TEST_CASE ( FuseReLUQIntoConvAsymmU8GpuAccTest  )

Definition at line 933 of file FuseActivationTests.cpp.

References armnn::GpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

934 {
935  ActivationDescriptor activationDescriptor;
936  activationDescriptor.m_Function = ActivationFunction::ReLu;
937 
938  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
939  (activationDescriptor, 0.0001f, Compute::GpuAcc);
940 }

◆ BOOST_AUTO_TEST_CASE() [45/62]

BOOST_AUTO_TEST_CASE ( FuseReLUQIntoDWConvAsymmU8GpuAccTest  )

Definition at line 941 of file FuseActivationTests.cpp.

References armnn::GpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

942 {
943  ActivationDescriptor activationDescriptor;
944  activationDescriptor.m_Function = ActivationFunction::ReLu;
945 
946  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
947  (activationDescriptor, 0.0001f, Compute::GpuAcc);
948 }

◆ BOOST_AUTO_TEST_CASE() [46/62]

BOOST_AUTO_TEST_CASE ( FuseReLUQIntoFullyConnectedAsymmU8GpuAccTest  )

Definition at line 949 of file FuseActivationTests.cpp.

References armnn::GpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

950 {
951  ActivationDescriptor activationDescriptor;
952  activationDescriptor.m_Function = ActivationFunction::ReLu;
953 
954  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
955  (activationDescriptor, 0.0001f, Compute::GpuAcc);
956 }

◆ BOOST_AUTO_TEST_CASE() [47/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoConvQASymmS8GpuAccTest  )

Definition at line 959 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, ActivationDescriptor::m_Function, and armnn::QAsymmS8.

960 {
961  ActivationDescriptor activationDescriptor;
962  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
963  activationDescriptor.m_A = 6.0f;
964  activationDescriptor.m_B = 0.0f;
965 
966  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
967  (activationDescriptor, 0.0001f, Compute::GpuAcc);
968 }

◆ BOOST_AUTO_TEST_CASE() [48/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoDWConvQASymmS8GpuAccTest  )

Definition at line 969 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, ActivationDescriptor::m_Function, and armnn::QAsymmS8.

970 {
971  ActivationDescriptor activationDescriptor;
972  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
973  activationDescriptor.m_A = 6.0f;
974  activationDescriptor.m_B = 0.0f;
975 
976  FuseActivationIntoPreviousLayerTest<DWConvolution2dTest<DataType::QAsymmS8>, DataType::QAsymmS8>
977  (activationDescriptor, 0.0001f, Compute::GpuAcc);
978 }

◆ BOOST_AUTO_TEST_CASE() [49/62]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoFullyConnectedQASymmS8GpuAccTest  )

Definition at line 979 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, ActivationDescriptor::m_Function, and armnn::QAsymmS8.

980 {
981  ActivationDescriptor activationDescriptor;
982  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
983  activationDescriptor.m_A = 6.0f;
984  activationDescriptor.m_B = 0.0f;
985 
986  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmS8>, DataType::QAsymmS8>
987  (activationDescriptor, 0.0001f, Compute::GpuAcc);
988 }

◆ BOOST_AUTO_TEST_CASE() [50/62]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoConvFloat32GpuAccTest  )

Definition at line 991 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::TanH.

992 {
993  ActivationDescriptor activationDescriptor;
994  activationDescriptor.m_Function = ActivationFunction::TanH;
995 
996  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
997  (activationDescriptor, 0.0001f, Compute::GpuAcc);
998 }

◆ BOOST_AUTO_TEST_CASE() [51/62]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoMulFloat32GpuAccTest  )

Definition at line 999 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::TanH.

1000 {
1001  ActivationDescriptor activationDescriptor;
1002  activationDescriptor.m_Function = ActivationFunction::TanH;
1003 
1004  FuseActivationIntoPreviousLayerTest<MultiplicationTest<DataType::Float32>, DataType::Float32>
1005  (activationDescriptor, 0.0001f, Compute::GpuAcc);
1006 }

◆ BOOST_AUTO_TEST_CASE() [52/62]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoAddFloat32GpuAccTest  )

Definition at line 1007 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::TanH.

1008 {
1009  ActivationDescriptor activationDescriptor;
1010  activationDescriptor.m_Function = ActivationFunction::TanH;
1011 
1012  FuseActivationIntoPreviousLayerTest<AdditionTest<DataType::Float32>, DataType::Float32>
1013  (activationDescriptor, 0.0001f, Compute::GpuAcc);
1014 }

◆ BOOST_AUTO_TEST_CASE() [53/62]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoSubFloat32GpuAccTest  )

Definition at line 1015 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::TanH.

1016 {
1017  ActivationDescriptor activationDescriptor;
1018  activationDescriptor.m_Function = ActivationFunction::TanH;
1019 
1020  FuseActivationIntoPreviousLayerTest<SubtractionTest<DataType::Float32>, DataType::Float32>
1021  (activationDescriptor, 0.0001f, Compute::GpuAcc);
1022 }

◆ BOOST_AUTO_TEST_CASE() [54/62]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoDivFloat32GpuAccTest  )

Definition at line 1023 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::TanH.

1024 {
1025  ActivationDescriptor activationDescriptor;
1026  activationDescriptor.m_Function = ActivationFunction::TanH;
1027 
1028  FuseActivationIntoPreviousLayerTest<DivisionTest<DataType::Float32>, DataType::Float32>
1029  (activationDescriptor, 0.0001f, Compute::GpuAcc);
1030 }

◆ BOOST_AUTO_TEST_CASE() [55/62]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoConvFloat32GpuAccTest  )

Definition at line 1033 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, armnn::HardSwish, and ActivationDescriptor::m_Function.

1034 {
1035  ActivationDescriptor activationDescriptor;
1036  activationDescriptor.m_Function = ActivationFunction::HardSwish;
1037 
1038  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
1039  (activationDescriptor, 0.0001f, Compute::GpuAcc);
1040 }
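HardSwish computes x * ReLU6(x + 3) / 6, and the 0.0001f argument is the tolerance FuseActivationIntoPreviousLayerTest uses when comparing results. A standalone sketch of the function, for illustration only (HardSwishRef is not part of this test file):

#include <algorithm>

// hard-swish(x) = x * min(6, max(0, x + 3)) / 6
float HardSwishRef(float x)
{
    const float relu6 = std::min(6.0f, std::max(0.0f, x + 3.0f));
    return x * relu6 / 6.0f;
}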

◆ BOOST_AUTO_TEST_CASE() [56/62]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoMulFloat32GpuAccTest  )

Definition at line 1041 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, armnn::HardSwish, and ActivationDescriptor::m_Function.

1042 {
1043  ActivationDescriptor activationDescriptor;
1044  activationDescriptor.m_Function = ActivationFunction::HardSwish;
1045 
1046  FuseActivationIntoPreviousLayerTest<MultiplicationTest<DataType::Float32>, DataType::Float32>
1047  (activationDescriptor, 0.0001f, Compute::GpuAcc);
1048 }

◆ BOOST_AUTO_TEST_CASE() [57/62]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoAddFloat32GpuAccTest  )

Definition at line 1049 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, armnn::HardSwish, and ActivationDescriptor::m_Function.

1050 {
1051  ActivationDescriptor activationDescriptor;
1052  activationDescriptor.m_Function = ActivationFunction::HardSwish;
1053 
1054  FuseActivationIntoPreviousLayerTest<AdditionTest<DataType::Float32>, DataType::Float32>
1055  (activationDescriptor, 0.0001f, Compute::GpuAcc);
1056 }

◆ BOOST_AUTO_TEST_CASE() [58/62]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoSubFloat32GpuAccTest  )

Definition at line 1057 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, armnn::HardSwish, and ActivationDescriptor::m_Function.

1058 {
1059  ActivationDescriptor activationDescriptor;
1060  activationDescriptor.m_Function = ActivationFunction::HardSwish;
1061 
1062  FuseActivationIntoPreviousLayerTest<SubtractionTest<DataType::Float32>, DataType::Float32>
1063  (activationDescriptor, 0.0001f, Compute::GpuAcc);
1064 }

◆ BOOST_AUTO_TEST_CASE() [59/62]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoDivFloat32GpuAccTest  )

Definition at line 1065 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, armnn::HardSwish, and ActivationDescriptor::m_Function.

1066 {
1067  ActivationDescriptor activationDescriptor;
1068  activationDescriptor.m_Function = ActivationFunction::HardSwish;
1069 
1070  FuseActivationIntoPreviousLayerTest<DivisionTest<DataType::Float32>, DataType::Float32>
1071  (activationDescriptor, 0.0001f, Compute::GpuAcc);
1072 }

◆ BOOST_AUTO_TEST_CASE() [60/62]

BOOST_AUTO_TEST_CASE ( LayerFollowedByActivationFloat32GpuAccTest  )

Definition at line 1075 of file FuseActivationTests.cpp.

References armnn::Elu, armnn::Float32, armnn::FuseActivationSimpleTest(), armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

1076 {
1077  ActivationDescriptor activationDescriptor;
1078  for (int i = 0; i != 12; ++i)
1079  {
1080  activationDescriptor.m_Function = static_cast<ActivationFunction>(i);
1081  activationDescriptor.m_A = 1.0f;
1082  activationDescriptor.m_B = -1.0f;
1083  if (activationDescriptor.m_Function != ActivationFunction::Elu)
1084  {
1085  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
1086  (activationDescriptor, Compute::GpuAcc)), "Convolution + Activation function " << i);
1087  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float32>, DataType::Float32>
1088  (activationDescriptor, Compute::GpuAcc)), "DepthwiseConvolution + Activation function " << i);
1089  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
1090  (activationDescriptor, Compute::GpuAcc)), "FullyConnected + Activation function " << i);
1091  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float32>, DataType::Float32>
1092  (activationDescriptor, Compute::GpuAcc)), "BatchNorm + Activation function " << i);
1093  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<MultiplicationTest<DataType::Float32>, DataType::Float32>
1094  (activationDescriptor, Compute::GpuAcc)), "Multiplication + Activation function " << i);
1095  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<AdditionTest<DataType::Float32>, DataType::Float32>
1096  (activationDescriptor, Compute::GpuAcc)), "Addition + Activation function " << i);
1097  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<SubtractionTest<DataType::Float32>, DataType::Float32>
1098  (activationDescriptor, Compute::GpuAcc)), "Subtraction + Activation function " << i);
1099  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<DivisionTest<DataType::Float32>, DataType::Float32>
1100  (activationDescriptor, Compute::GpuAcc)), "Division + Activation function " << i);
1101  }
1102  }
1103 }
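The loop above casts i in [0, 12) to ActivationFunction and, Elu aside, checks each layer type followed by that activation on GpuAcc. The pattern being exercised is the ordinary user-facing one: a layer (here a Convolution2d) feeding an ActivationLayer, which the optimizer may fold into the preceding layer when targeting GpuAcc. Below is a minimal sketch of that pattern using the public ArmNN API, assuming trivial 1x1 shapes and weights; it is an illustration only, not the CreatNetwork helper used by these tests:

#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/Optional.hpp>
#include <armnn/Tensor.hpp>
#include <vector>

armnn::IOptimizedNetworkPtr BuildConvReluForGpuAcc()
{
    using namespace armnn;

    INetworkPtr network = INetwork::Create();

    // 1x3x3x1 NHWC input and output, a single 1x1 filter.
    TensorInfo inputInfo({1, 3, 3, 1}, DataType::Float32);
    TensorInfo weightInfo({1, 1, 1, 1}, DataType::Float32);
    TensorInfo outputInfo({1, 3, 3, 1}, DataType::Float32);

    std::vector<float> weightData = {1.0f};
    ConstTensor weights(weightInfo, weightData);

    Convolution2dDescriptor convDesc;
    convDesc.m_StrideX    = 1;
    convDesc.m_StrideY    = 1;
    convDesc.m_DataLayout = DataLayout::NHWC;

    ActivationDescriptor reluDesc;
    reluDesc.m_Function = ActivationFunction::ReLu;

    IConnectableLayer* input  = network->AddInputLayer(0);
    IConnectableLayer* conv   = network->AddConvolution2dLayer(convDesc, weights, EmptyOptional(), "conv");
    IConnectableLayer* relu   = network->AddActivationLayer(reluDesc, "relu");
    IConnectableLayer* output = network->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(conv->GetInputSlot(0));
    conv->GetOutputSlot(0).Connect(relu->GetInputSlot(0));
    relu->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(inputInfo);
    conv->GetOutputSlot(0).SetTensorInfo(outputInfo);
    relu->GetOutputSlot(0).SetTensorInfo(outputInfo);

    // Optimizing for GpuAcc is the step that may fuse the activation into the convolution.
    IRuntimePtr runtime = IRuntime::Create(IRuntime::CreationOptions());
    return Optimize(*network, {Compute::GpuAcc}, runtime->GetDeviceSpec());
}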

◆ BOOST_AUTO_TEST_CASE() [61/62]

BOOST_AUTO_TEST_CASE ( LayerFollowedByActivationFloat16GpuAccTest  )

Definition at line 1104 of file FuseActivationTests.cpp.

References armnn::Elu, armnn::Float16, armnn::FuseActivationSimpleTest(), armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

1105 {
1106  ActivationDescriptor activationDescriptor;
1107  for (int i = 0; i != 12; ++i)
1108  {
1109  activationDescriptor.m_Function = static_cast<ActivationFunction>(i);
1110  activationDescriptor.m_A = 1.0f;
1111  activationDescriptor.m_B = -1.0f;
1112  if (activationDescriptor.m_Function != ActivationFunction::Elu)
1113  {
1114  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::Float16>, DataType::Float16>
1115  (activationDescriptor, Compute::GpuAcc)), "Convolution + Activation function " << i);
1116  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<DWConvolution2dTest<DataType::Float16>, DataType::Float16>
1117  (activationDescriptor, Compute::GpuAcc)), "Depthwise + Activation function " << i);
1118  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::Float16>, DataType::Float16>
1119  (activationDescriptor, Compute::GpuAcc)), "FullyConnected + Activation function " << i);
1120  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<BatchNormTest<DataType::Float16>, DataType::Float16>
1121  (activationDescriptor, Compute::GpuAcc)), "BatchNorm + Activation function " << i);
1122  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<MultiplicationTest<DataType::Float16>, DataType::Float16>
1123  (activationDescriptor, Compute::GpuAcc)), "Multiplication + Activation function " << i);
1124  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<AdditionTest<DataType::Float16>, DataType::Float16>
1125  (activationDescriptor, Compute::GpuAcc)), "Addition + Activation function " << i);
1126  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<SubtractionTest<DataType::Float16>, DataType::Float16>
1127  (activationDescriptor, Compute::GpuAcc)), "Subtraction + Activation function " << i);
1128  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<DivisionTest<DataType::Float16>, DataType::Float16>
1129  (activationDescriptor, Compute::GpuAcc)), "Division + Activation function " << i);
1130  }
1131  }
1132 }

◆ BOOST_AUTO_TEST_CASE() [62/62]

BOOST_AUTO_TEST_CASE ( LayerFollowedByActivationQAsymmU8GpuAccTest  )

Definition at line 1133 of file FuseActivationTests.cpp.

References BOOST_AUTO_TEST_SUITE_END(), armnn::BoundedReLu, armnn::FuseActivationSimpleTest(), armnn::GpuAcc, armnn::HardSwish, ActivationDescriptor::m_A, ActivationDescriptor::m_B, ActivationDescriptor::m_Function, armnn::QAsymmU8, armnn::ReLu, armnn::Sigmoid, and armnn::TanH.

1134 {
1135  ActivationDescriptor activationDescriptor;
1136 
1137  activationDescriptor.m_Function = ActivationFunction::Sigmoid;
1138  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
1139  (activationDescriptor, Compute::GpuAcc, 1.f / 256.f, 0)), "Convolution + Activation function " <<
1140  static_cast<int>(activationDescriptor.m_Function));
1141  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
1142  (activationDescriptor, Compute::GpuAcc, 1.f / 256.f, 0)), "FullyConnected + Activation function " <<
1143  static_cast<int>(activationDescriptor.m_Function));
1144 
1145  activationDescriptor.m_Function = ActivationFunction::TanH;
1146  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
1147  (activationDescriptor, Compute::GpuAcc, 1.f / 128.f, 128)), "Convolution + Activation function " <<
1148  static_cast<int>(activationDescriptor.m_Function));
1149  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
1150  (activationDescriptor, Compute::GpuAcc, 1.f / 128.f, 128)), "FullyConnected + Activation function " <<
1151  static_cast<int>(activationDescriptor.m_Function));
1152 
1153  activationDescriptor.m_Function = ActivationFunction::ReLu;
1154  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
1155  (activationDescriptor, Compute::GpuAcc)), "Convolution + Activation function " <<
1156  static_cast<int>(activationDescriptor.m_Function));
1157  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
1158  (activationDescriptor, Compute::GpuAcc)), "FullyConnected + Activation function " <<
1159  static_cast<int>(activationDescriptor.m_Function));
1160 
1161  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
1162  activationDescriptor.m_A = 1.0f;
1163  activationDescriptor.m_B = -1.0f;
1164  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
1165  (activationDescriptor, Compute::GpuAcc)), "Convolution + Activation function " <<
1166  static_cast<int>(activationDescriptor.m_Function));
1167  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
1168  (activationDescriptor, Compute::GpuAcc)), "FullyConnected + Activation function " <<
1169  static_cast<int>(activationDescriptor.m_Function));
1170 
1171  activationDescriptor.m_Function = ActivationFunction::HardSwish;
1172  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
1173  (activationDescriptor, Compute::GpuAcc)), "Convolution + Activation function " <<
1174  static_cast<int>(activationDescriptor.m_Function));
1175  BOOST_CHECK_MESSAGE((FuseActivationSimpleTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
1176  (activationDescriptor, Compute::GpuAcc)), "FullyConnected + Activation function " <<
1177  static_cast<int>(activationDescriptor.m_Function));
1178 }
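The scale and offset arguments above follow ArmNN's affine quantization, real = scale * (quantized - offset): scale 1/256 with offset 0 spans the Sigmoid output range [0, 1), while scale 1/128 with offset 128 spans the TanH range [-1, 1). A quick check of that arithmetic (Dequantize here is illustrative only, not the helper used by the tests):

#include <cstdint>
#include <iostream>

// Affine dequantization for QAsymmU8 values: real = scale * (q - offset).
float Dequantize(uint8_t q, float scale, int32_t offset)
{
    return scale * static_cast<float>(static_cast<int32_t>(q) - offset);
}

int main()
{
    std::cout << Dequantize(255, 1.f / 256.f, 0)   << "\n"; // ~0.996, top of the Sigmoid range
    std::cout << Dequantize(0,   1.f / 128.f, 128) << "\n"; // -1.0, bottom of the TanH range
    std::cout << Dequantize(255, 1.f / 128.f, 128) << "\n"; // ~0.992, top of the TanH range
}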