ArmNN 20.11
FuseActivationTests.cpp File Reference
#include "LayersFwd.hpp"
#include <Network.hpp>
#include <ResolveType.hpp>
#include <armnn/INetwork.hpp>
#include <test/TestUtils.hpp>
#include <boost/test/unit_test.hpp>
#include <QuantizeHelper.hpp>
#include <string>


Functions

template<typename LayerTest , armnn::DataType ArmnnType>
INetworkPtr CreatNetwork (ActivationDescriptor activationDescriptor, bool preventFusing)
 
template<typename LayerTest , armnn::DataType ArmnnType, typename LayerType = typename LayerTest::LayerType, typename T = armnn::ResolveType<ArmnnType>>
void FuseActivationIntoPreviousLayerTest (ActivationDescriptor activationDescriptor, float tolerance, armnn::Compute backendId)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoDWConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoFullyConnectedFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoBatchNormFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoDWConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoFullyConnectedFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoBatchNormFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoConvQAsymmU8CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoDWConvQAsymmU8CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoFullyConnectedQAsymmU8CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoConvFloat32CpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoDWConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoFullyConnectedFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoBatchNormFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoMulFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoAddFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoSubFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUIntoDivFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoDWConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoFullyConnectedFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoBatchNormFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoMulFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoAddFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoSubFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseBoundedReLUIntoDivFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUQIntoConvAsymmU8GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUQIntoDWConvAsymmU8GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseReLUQIntoFullyConnectedAsymmU8GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoMulFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoAddFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoSubFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseHardSwishIntoDivFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoConvFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoMulFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoAddFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoSubFloat32GpuAccTest)
 
 BOOST_AUTO_TEST_CASE (FuseTanHIntoDivFloat32GpuAccTest)
 

Function Documentation
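All 42 test cases documented below follow the same pattern: fill in an ActivationDescriptor, pick a layer-under-test fixture and a data type, and call the templated helper FuseActivationIntoPreviousLayerTest against a backend (CpuAcc runs on the NEON ArmCompute backend, GpuAcc on the OpenCL one). The sketch below shows that pattern in outline; LayerFixture and DataType::X are placeholders for any of the fixture/data-type combinations used by the cases, for example Convolution2dTest and DataType::Float32.

ActivationDescriptor activationDescriptor;
activationDescriptor.m_Function = ActivationFunction::ReLu;    // or BoundedReLu, HardSwish, TanH
// BoundedReLu cases additionally set the bounds:
//   activationDescriptor.m_A =  1.0f;   // alpha, upper bound
//   activationDescriptor.m_B = -1.0f;   // beta, lower bound

FuseActivationIntoPreviousLayerTest<LayerFixture<DataType::X>, DataType::X>
    (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);   // or armnn::Compute::GpuAcc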

◆ BOOST_AUTO_TEST_CASE() [1/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoConvFloat32CpuAccTest  )

Definition at line 407 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_Function, and armnn::ReLu.

408 {
409  ActivationDescriptor activationDescriptor;
410  activationDescriptor.m_Function = ActivationFunction::ReLu;
411 
412  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
413  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
414 }

◆ BOOST_AUTO_TEST_CASE() [2/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoDWConvFloat32CpuAccTest  )

Definition at line 415 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_Function, and armnn::ReLu.

416 {
417  ActivationDescriptor activationDescriptor;
418  activationDescriptor.m_Function = ActivationFunction::ReLu;
419 
420  FuseActivationIntoPreviousLayerTest<DepthwiseConvolution2dTest<DataType::Float32>, DataType::Float32>
421  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
422 }

◆ BOOST_AUTO_TEST_CASE() [3/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoFullyConnectedFloat32CpuAccTest  )

Definition at line 423 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_Function, and armnn::ReLu.

424 {
425  ActivationDescriptor activationDescriptor;
426  activationDescriptor.m_Function = ActivationFunction::ReLu;
427 
428  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
429  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
430 }

◆ BOOST_AUTO_TEST_CASE() [4/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoBatchNormFloat32CpuAccTest  )

Definition at line 431 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_Function, and armnn::ReLu.

432 {
433  ActivationDescriptor activationDescriptor;
434  activationDescriptor.m_Function = ActivationFunction::ReLu;
435 
436  FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
437  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
438 }

◆ BOOST_AUTO_TEST_CASE() [5/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoConvFloat32CpuAccTest  )

Definition at line 441 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

442 {
443  ActivationDescriptor activationDescriptor;
444  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
445  activationDescriptor.m_A = 1.0f;
446  activationDescriptor.m_B = -1.0f;
447 
448  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
449  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
450 }

◆ BOOST_AUTO_TEST_CASE() [6/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoDWConvFloat32CpuAccTest  )

Definition at line 451 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

452 {
453  ActivationDescriptor activationDescriptor;
454  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
455  activationDescriptor.m_A = 1.0f;
456  activationDescriptor.m_B = -1.0f;
457 
458  FuseActivationIntoPreviousLayerTest<DepthwiseConvolution2dTest<DataType::Float32>, DataType::Float32>
459  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
460 }

◆ BOOST_AUTO_TEST_CASE() [7/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoFullyConnectedFloat32CpuAccTest  )

Definition at line 461 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

462 {
463  ActivationDescriptor activationDescriptor;
464  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
465  activationDescriptor.m_A = 1.0f;
466  activationDescriptor.m_B = -1.0f;
467 
468  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
469  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
470 }

◆ BOOST_AUTO_TEST_CASE() [8/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoBatchNormFloat32CpuAccTest  )

Definition at line 471 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

472 {
473  ActivationDescriptor activationDescriptor;
474  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
475  activationDescriptor.m_A = 1.0f;
476  activationDescriptor.m_B = -1.0f;
477 
478  FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
479  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
480 }

◆ BOOST_AUTO_TEST_CASE() [9/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoConvQAsymmU8CpuAccTest  )

Definition at line 483 of file FuseActivationTests.cpp.

References armnn::CpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

484 {
485  ActivationDescriptor activationDescriptor;
486  activationDescriptor.m_Function = ActivationFunction::ReLu;
487 
488  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
489  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
490 }

◆ BOOST_AUTO_TEST_CASE() [10/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoDWConvQAsymmU8CpuAccTest  )

Definition at line 491 of file FuseActivationTests.cpp.

References armnn::CpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

492 {
493  ActivationDescriptor activationDescriptor;
494  activationDescriptor.m_Function = ActivationFunction::ReLu;
495 
496  FuseActivationIntoPreviousLayerTest<DepthwiseConvolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
497  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
498 }

◆ BOOST_AUTO_TEST_CASE() [11/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoFullyConnectedQAsymmU8CpuAccTest  )

Definition at line 499 of file FuseActivationTests.cpp.

References armnn::CpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

500 {
501  ActivationDescriptor activationDescriptor;
502  activationDescriptor.m_Function = ActivationFunction::ReLu;
503 
504  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
505  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
506 }

◆ BOOST_AUTO_TEST_CASE() [12/42]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoConvFloat32CpuAccTest  )

Definition at line 509 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, armnn::HardSwish, and ActivationDescriptor::m_Function.

510 {
511  ActivationDescriptor activationDescriptor;
512  activationDescriptor.m_Function = ActivationFunction::HardSwish;
513 
514  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
515  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
516 }

◆ BOOST_AUTO_TEST_CASE() [13/42]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoConvFloat32CpuAccTest  )

Definition at line 519 of file FuseActivationTests.cpp.

References armnn::CpuAcc, armnn::Float32, ActivationDescriptor::m_Function, and armnn::TanH.

520 {
521  ActivationDescriptor activationDescriptor;
522  activationDescriptor.m_Function = ActivationFunction::TanH;
523 
524  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
525  (activationDescriptor, 0.0001f, armnn::Compute::CpuAcc);
526 }

◆ BOOST_AUTO_TEST_CASE() [14/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoConvFloat32GpuAccTest  )

Definition at line 531 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

532 {
533  ActivationDescriptor activationDescriptor;
534  activationDescriptor.m_Function = ActivationFunction::ReLu;
535 
536  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
537  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
538 }

◆ BOOST_AUTO_TEST_CASE() [15/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoDWConvFloat32GpuAccTest  )

Definition at line 539 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

540 {
541  ActivationDescriptor activationDescriptor;
542  activationDescriptor.m_Function = ActivationFunction::ReLu;
543 
544  FuseActivationIntoPreviousLayerTest<DepthwiseConvolution2dTest<DataType::Float32>, DataType::Float32>
545  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
546 }

◆ BOOST_AUTO_TEST_CASE() [16/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoFullyConnectedFloat32GpuAccTest  )

Definition at line 547 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

548 {
549  ActivationDescriptor activationDescriptor;
550  activationDescriptor.m_Function = ActivationFunction::ReLu;
551 
552  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
553  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
554 }

◆ BOOST_AUTO_TEST_CASE() [17/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoBatchNormFloat32GpuAccTest  )

Definition at line 555 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

556 {
557  ActivationDescriptor activationDescriptor;
558  activationDescriptor.m_Function = ActivationFunction::ReLu;
559 
560  FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
561  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
562 }

◆ BOOST_AUTO_TEST_CASE() [18/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoMulFloat32GpuAccTest  )

Definition at line 563 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

564 {
565  ActivationDescriptor activationDescriptor;
566  activationDescriptor.m_Function = ActivationFunction::ReLu;
567 
568  FuseActivationIntoPreviousLayerTest<MultiplicationTest<DataType::Float32>, DataType::Float32>
569  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
570 }

◆ BOOST_AUTO_TEST_CASE() [19/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoAddFloat32GpuAccTest  )

Definition at line 571 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

572 {
573  ActivationDescriptor activationDescriptor;
574  activationDescriptor.m_Function = ActivationFunction::ReLu;
575 
576  FuseActivationIntoPreviousLayerTest<AdditionTest<DataType::Float32>, DataType::Float32>
577  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
578 }

◆ BOOST_AUTO_TEST_CASE() [20/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoSubFloat32GpuAccTest  )

Definition at line 579 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

580 {
581  ActivationDescriptor activationDescriptor;
582  activationDescriptor.m_Function = ActivationFunction::ReLu;
583 
584  FuseActivationIntoPreviousLayerTest<SubtractionTest<DataType::Float32>, DataType::Float32>
585  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
586 }

◆ BOOST_AUTO_TEST_CASE() [21/42]

BOOST_AUTO_TEST_CASE ( FuseReLUIntoDivFloat32GpuAccTest  )

Definition at line 587 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::ReLu.

588 {
589  ActivationDescriptor activationDescriptor;
590  activationDescriptor.m_Function = ActivationFunction::ReLu;
591 
592  FuseActivationIntoPreviousLayerTest<DivisionTest<DataType::Float32>, DataType::Float32>
593  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
594 }

◆ BOOST_AUTO_TEST_CASE() [22/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoConvFloat32GpuAccTest  )

Definition at line 597 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

598 {
599  ActivationDescriptor activationDescriptor;
600  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
601  activationDescriptor.m_A = 1.0f;
602  activationDescriptor.m_B = -1.0f;
603 
604  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
605  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
606 }

◆ BOOST_AUTO_TEST_CASE() [23/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoDWConvFloat32GpuAccTest  )

Definition at line 607 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

608 {
609  ActivationDescriptor activationDescriptor;
610  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
611  activationDescriptor.m_A = 1.0f;
612  activationDescriptor.m_B = -1.0f;
613 
614  FuseActivationIntoPreviousLayerTest<DepthwiseConvolution2dTest<DataType::Float32>, DataType::Float32>
615  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
616 }

◆ BOOST_AUTO_TEST_CASE() [24/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoFullyConnectedFloat32GpuAccTest  )

Definition at line 617 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

618 {
619  ActivationDescriptor activationDescriptor;
620  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
621  activationDescriptor.m_A = 1.0f;
622  activationDescriptor.m_B = -1.0f;
623 
624  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::Float32>, DataType::Float32>
625  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
626 }

◆ BOOST_AUTO_TEST_CASE() [25/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoBatchNormFloat32GpuAccTest  )

Definition at line 627 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

628 {
629  ActivationDescriptor activationDescriptor;
630  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
631  activationDescriptor.m_A = 1.0f;
632  activationDescriptor.m_B = -1.0f;
633 
634  FuseActivationIntoPreviousLayerTest<BatchNormTest<DataType::Float32>, DataType::Float32>
635  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
636 }

◆ BOOST_AUTO_TEST_CASE() [26/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoMulFloat32GpuAccTest  )

Definition at line 637 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

638 {
639  ActivationDescriptor activationDescriptor;
640  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
641  activationDescriptor.m_A = 1.0f;
642  activationDescriptor.m_B = -1.0f;
643 
644  FuseActivationIntoPreviousLayerTest<MultiplicationTest<DataType::Float32>, DataType::Float32>
645  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
646 }

◆ BOOST_AUTO_TEST_CASE() [27/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoAddFloat32GpuAccTest  )

Definition at line 647 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

648 {
649  ActivationDescriptor activationDescriptor;
650  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
651  activationDescriptor.m_A = 1.0f;
652  activationDescriptor.m_B = -1.0f;
653 
654  FuseActivationIntoPreviousLayerTest<AdditionTest<DataType::Float32>, DataType::Float32>
655  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
656 }

◆ BOOST_AUTO_TEST_CASE() [28/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoSubFloat32GpuAccTest  )

Definition at line 657 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

658 {
659  ActivationDescriptor activationDescriptor;
660  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
661  activationDescriptor.m_A = 1.0f;
662  activationDescriptor.m_B = -1.0f;
663 
664  FuseActivationIntoPreviousLayerTest<SubtractionTest<DataType::Float32>, DataType::Float32>
665  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
666 }

◆ BOOST_AUTO_TEST_CASE() [29/42]

BOOST_AUTO_TEST_CASE ( FuseBoundedReLUIntoDivFloat32GpuAccTest  )

Definition at line 667 of file FuseActivationTests.cpp.

References armnn::BoundedReLu, armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_A, ActivationDescriptor::m_B, and ActivationDescriptor::m_Function.

668 {
669  ActivationDescriptor activationDescriptor;
670  activationDescriptor.m_Function = ActivationFunction::BoundedReLu;
671  activationDescriptor.m_A = 1.0f;
672  activationDescriptor.m_B = -1.0f;
673 
674  FuseActivationIntoPreviousLayerTest<DivisionTest<DataType::Float32>, DataType::Float32>
675  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
676 }

◆ BOOST_AUTO_TEST_CASE() [30/42]

BOOST_AUTO_TEST_CASE ( FuseReLUQIntoConvAsymmU8GpuAccTest  )

Definition at line 679 of file FuseActivationTests.cpp.

References armnn::GpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

680 {
681  ActivationDescriptor activationDescriptor;
682  activationDescriptor.m_Function = ActivationFunction::ReLu;
683 
684  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
685  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
686 }

◆ BOOST_AUTO_TEST_CASE() [31/42]

BOOST_AUTO_TEST_CASE ( FuseReLUQIntoDWConvAsymmU8GpuAccTest  )

Definition at line 687 of file FuseActivationTests.cpp.

References armnn::GpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

688 {
689  ActivationDescriptor activationDescriptor;
690  activationDescriptor.m_Function = ActivationFunction::ReLu;
691 
692  FuseActivationIntoPreviousLayerTest<DepthwiseConvolution2dTest<DataType::QAsymmU8>, DataType::QAsymmU8>
693  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
694 }

◆ BOOST_AUTO_TEST_CASE() [32/42]

BOOST_AUTO_TEST_CASE ( FuseReLUQIntoFullyConnectedAsymmU8GpuAccTest  )

Definition at line 695 of file FuseActivationTests.cpp.

References armnn::GpuAcc, ActivationDescriptor::m_Function, armnn::QAsymmU8, and armnn::ReLu.

696 {
697  ActivationDescriptor activationDescriptor;
698  activationDescriptor.m_Function = ActivationFunction::ReLu;
699 
700  FuseActivationIntoPreviousLayerTest<FullyConnectedTest<DataType::QAsymmU8>, DataType::QAsymmU8>
701  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
702 }

◆ BOOST_AUTO_TEST_CASE() [33/42]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoConvFloat32GpuAccTest  )

Definition at line 705 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, armnn::HardSwish, and ActivationDescriptor::m_Function.

706 {
707  ActivationDescriptor activationDescriptor;
708  activationDescriptor.m_Function = ActivationFunction::HardSwish;
709 
710  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
711  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
712 }

◆ BOOST_AUTO_TEST_CASE() [34/42]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoMulFloat32GpuAccTest  )

Definition at line 713 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, armnn::HardSwish, and ActivationDescriptor::m_Function.

714 {
715  ActivationDescriptor activationDescriptor;
716  activationDescriptor.m_Function = ActivationFunction::HardSwish;
717 
718  FuseActivationIntoPreviousLayerTest<MultiplicationTest<DataType::Float32>, DataType::Float32>
719  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
720 }

◆ BOOST_AUTO_TEST_CASE() [35/42]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoAddFloat32GpuAccTest  )

Definition at line 721 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, armnn::HardSwish, and ActivationDescriptor::m_Function.

722 {
723  ActivationDescriptor activationDescriptor;
724  activationDescriptor.m_Function = ActivationFunction::HardSwish;
725 
726  FuseActivationIntoPreviousLayerTest<AdditionTest<DataType::Float32>, DataType::Float32>
727  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
728 }

◆ BOOST_AUTO_TEST_CASE() [36/42]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoSubFloat32GpuAccTest  )

Definition at line 729 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, armnn::HardSwish, and ActivationDescriptor::m_Function.

730 {
731  ActivationDescriptor activationDescriptor;
732  activationDescriptor.m_Function = ActivationFunction::HardSwish;
733 
734  FuseActivationIntoPreviousLayerTest<SubtractionTest<DataType::Float32>, DataType::Float32>
735  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
736 }

◆ BOOST_AUTO_TEST_CASE() [37/42]

BOOST_AUTO_TEST_CASE ( FuseHardSwishIntoDivFloat32GpuAccTest  )

Definition at line 737 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, armnn::HardSwish, and ActivationDescriptor::m_Function.

738 {
739  ActivationDescriptor activationDescriptor;
740  activationDescriptor.m_Function = ActivationFunction::HardSwish;
741 
742  FuseActivationIntoPreviousLayerTest<DivisionTest<DataType::Float32>, DataType::Float32>
743  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
744 }

◆ BOOST_AUTO_TEST_CASE() [38/42]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoConvFloat32GpuAccTest  )

Definition at line 747 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::TanH.

748 {
749  ActivationDescriptor activationDescriptor;
750  activationDescriptor.m_Function = ActivationFunction::TanH;
751 
752  FuseActivationIntoPreviousLayerTest<Convolution2dTest<DataType::Float32>, DataType::Float32>
753  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
754 }

◆ BOOST_AUTO_TEST_CASE() [39/42]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoMulFloat32GpuAccTest  )

Definition at line 755 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::TanH.

756 {
757  ActivationDescriptor activationDescriptor;
758  activationDescriptor.m_Function = ActivationFunction::TanH;
759 
760  FuseActivationIntoPreviousLayerTest<MultiplicationTest<DataType::Float32>, DataType::Float32>
761  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
762 }

◆ BOOST_AUTO_TEST_CASE() [40/42]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoAddFloat32GpuAccTest  )

Definition at line 763 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::TanH.

764 {
765  ActivationDescriptor activationDescriptor;
766  activationDescriptor.m_Function = ActivationFunction::TanH;
767 
768  FuseActivationIntoPreviousLayerTest<AdditionTest<DataType::Float32>, DataType::Float32>
769  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
770 }

◆ BOOST_AUTO_TEST_CASE() [41/42]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoSubFloat32GpuAccTest  )

Definition at line 771 of file FuseActivationTests.cpp.

References armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::TanH.

772 {
773  ActivationDescriptor activationDescriptor;
774  activationDescriptor.m_Function = ActivationFunction::TanH;
775 
776  FuseActivationIntoPreviousLayerTest<SubtractionTest<DataType::Float32>, DataType::Float32>
777  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
778 }

◆ BOOST_AUTO_TEST_CASE() [42/42]

BOOST_AUTO_TEST_CASE ( FuseTanHIntoDivFloat32GpuAccTest  )

Definition at line 779 of file FuseActivationTests.cpp.

References BOOST_AUTO_TEST_SUITE_END(), armnn::Float32, armnn::GpuAcc, ActivationDescriptor::m_Function, and armnn::TanH.

780 {
781  ActivationDescriptor activationDescriptor;
782  activationDescriptor.m_Function = ActivationFunction::TanH;
783 
784  FuseActivationIntoPreviousLayerTest<DivisionTest<DataType::Float32>, DataType::Float32>
785  (activationDescriptor, 0.0001f, armnn::Compute::GpuAcc);
786 }

◆ CreatNetwork()

INetworkPtr CreatNetwork (ActivationDescriptor activationDescriptor, bool preventFusing)

Definition at line 268 of file FuseActivationTests.cpp.

References IOutputSlot::Connect(), INetwork::Create(), IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), and IOutputSlot::SetTensorInfo().

269 {
270  // Create a network
271  INetworkPtr network = INetwork::Create();
272 
273  IConnectableLayer* inputLayer = network->AddInputLayer(0);
274 
275  IConnectableLayer* receiverLayer = LayerTest::AddReceiverLayer(network.get(),
276  "receiverLayer");
277 
278  IConnectableLayer* activationLayer = network->AddActivationLayer(activationDescriptor,
279  "activation");
280 
281  IConnectableLayer* outputLayer = network->AddOutputLayer(0);
282  IConnectableLayer* output2Layer = preventFusing ? network->AddOutputLayer(1) : nullptr;
283 
284  // Define layers information
285  TensorInfo inputInfo(LayerTest::GetInputShape(), ArmnnType, g_qScale, g_qOffset);
286  TensorInfo outputInfo(LayerTest::GetOutputShape(), ArmnnType, g_qScale, g_qOffset);
287 
288  // Set layer information
289  inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
290  receiverLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
291  activationLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
292 
293  // Connect layers
294  inputLayer->GetOutputSlot(0).Connect(receiverLayer->GetInputSlot(0));
295  receiverLayer->GetOutputSlot(0).Connect(activationLayer->GetInputSlot(0));
296  activationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
297 
298  if (LayerTest::isElementWise)
299  {
300  inputLayer->GetOutputSlot(0).Connect(receiverLayer->GetInputSlot(1));
301  }
302  if (preventFusing)
303  {
304  receiverLayer->GetOutputSlot(0).Connect(output2Layer->GetInputSlot(0));
305  }
306 
307  return network;
308 }
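A short usage sketch for reference: this mirrors how FuseActivationIntoPreviousLayerTest (lines 319 and 360 below) calls CreatNetwork, once with fusing allowed and once with it prevented; the fixture and data type shown are just one of the combinations exercised by the test cases.

ActivationDescriptor activationDescriptor;
activationDescriptor.m_Function = ActivationFunction::ReLu;

// preventFusing == false: Input -> receiver -> Activation -> Output, so the
// optimizer is free to fuse the activation into the receiver layer.
INetworkPtr networkFused = CreatNetwork<Convolution2dTest<DataType::Float32>, DataType::Float32>
    (activationDescriptor, false);

// preventFusing == true: the receiver output also feeds a second OutputLayer,
// which keeps the ActivationLayer from being fused into its producer.
INetworkPtr networkNotFused = CreatNetwork<Convolution2dTest<DataType::Float32>, DataType::Float32>
    (activationDescriptor, true);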

◆ FuseActivationIntoPreviousLayerTest()

void FuseActivationIntoPreviousLayerTest (ActivationDescriptor activationDescriptor, float tolerance, armnn::Compute backendId)

Definition at line 314 of file FuseActivationTests.cpp.

316 {
317  // FIRST NETWORK: Fused
318  // Construct ArmNN network
319  INetworkPtr networkFused = CreatNetwork<LayerTest, ArmnnType>(activationDescriptor, false);
320 
321  // Create ArmNN runtime
322  IRuntimePtr run = IRuntime::Create(IRuntime::CreationOptions()); // default options
323 
324  // Optimise ArmNN network
325  IOptimizedNetworkPtr optNetFused = Optimize(*networkFused, {backendId}, run->GetDeviceSpec());
326 
327  Graph graphFused = PolymorphicDowncast<OptimizedNetwork*>(optNetFused.get())->GetGraph();
328 
329  auto checkFusedConv2d = [](const armnn::Layer* const layer)->bool {
330  return IsLayerOfType<LayerType>(layer) &&
331  (layer->GetNameStr() == "fused-activation-into-receiverLayer");
332  };
333 
334  BOOST_CHECK_MESSAGE(3 == graphFused.GetNumLayers(), LayerTest::GetReceiverLayerName());
335  BOOST_TEST(CheckSequence(graphFused.cbegin(),
336  graphFused.cend(),
337  &IsLayerOfType<InputLayer>,
338  checkFusedConv2d,
339  &IsLayerOfType<OutputLayer>));
340 
341  // Load network into runtime
342  NetworkId networkIdentifier;
343  BOOST_TEST(run->LoadNetwork(networkIdentifier, std::move(optNetFused)) == Status::Success);
344 
345  //Creates structures for inputs and outputs.
346  std::vector<float> data = GetVector<float>(LayerTest::inputSize, 1.0f, 0.1f);
347  std::vector<T> inputDataFused = armnnUtils::QuantizedVector<T>(data, g_qScale, g_qOffset);
348  std::vector<T> outputDataFused(LayerTest::outputSize);
349 
350  InputTensors inputTensorsFused{
351  {0, ConstTensor(run->GetInputTensorInfo(networkIdentifier, 0), inputDataFused.data())}};
352  OutputTensors outputTensorsFused{
353  {0, Tensor(run->GetOutputTensorInfo(networkIdentifier, 0), outputDataFused.data())}};
354 
355  // Execute network
356  run->EnqueueWorkload(networkIdentifier, inputTensorsFused, outputTensorsFused);
357 
358  // SECOND NETWORK: NotFused
359  // Construct ArmNN network
360  INetworkPtr networkNotFused = CreatNetwork<LayerTest, ArmnnType>(activationDescriptor, true);
361 
362  // Create ArmNN runtime
363  IRuntimePtr runNotFused = IRuntime::Create(IRuntime::CreationOptions()); // default options
364 
365  // Optimise ArmNN network
366  IOptimizedNetworkPtr optNetNotFused = Optimize(*networkNotFused, {backendId}, runNotFused->GetDeviceSpec());
367 
368  Graph graphNotFused = PolymorphicDowncast<OptimizedNetwork*>(optNetNotFused.get())->GetGraph();
369 
370  BOOST_CHECK(5 == graphNotFused.GetNumLayers());
371  BOOST_TEST(CheckSequence(graphNotFused.cbegin(),
372  graphNotFused.cend(),
373  &IsLayerOfType<armnn::InputLayer>,
374  &IsLayerOfType<LayerType>,
375  &IsLayerOfType<armnn::ActivationLayer>,
376  &IsLayerOfType<armnn::OutputLayer>,
377  &IsLayerOfType<armnn::OutputLayer>));
378 
379  // Load network into runtime
380  NetworkId networkIdentifierNotFused;
381  BOOST_TEST(runNotFused->LoadNetwork(networkIdentifierNotFused, std::move(optNetNotFused)) == Status::Success);
382 
383  //Creates structures for inputs and outputs.
384  std::vector<T> inputDataNotFused = armnnUtils::QuantizedVector<T>(data, g_qScale, g_qOffset);
385  std::vector<T> outputDataNotFused(LayerTest::outputSize);
386  std::vector<T> outputData2NotFused(LayerTest::outputSize);
387 
388  InputTensors inputTensorsNotFused{
389  {0, ConstTensor(runNotFused->GetInputTensorInfo(networkIdentifierNotFused, 0), inputDataNotFused.data())}};
390  OutputTensors outputTensorsNotFused{
391  {0, Tensor(runNotFused->GetOutputTensorInfo(networkIdentifierNotFused, 0), outputDataNotFused.data())},
392  {1, Tensor(runNotFused->GetOutputTensorInfo(networkIdentifierNotFused, 1), outputData2NotFused.data())}};
393 
394  // Execute network
395  runNotFused->EnqueueWorkload(networkIdentifierNotFused, inputTensorsNotFused, outputTensorsNotFused);
396 
397  // Check the output of the fused-activation matches with the output of the activation in the "NotFused" network
398  for (unsigned int n = 0; n < outputDataFused.size(); ++n)
399  {
400  BOOST_CHECK_CLOSE(static_cast<float>(outputDataFused[n]), static_cast<float>(outputDataNotFused[n]),
401  T(tolerance));
402  }
403 }
Definition: INetwork.hpp:101