/** Returns the tolerance used when comparing an activation's output in the given data type. */
float GetActivationTolerance(const ActivationFunction& activationFunction, DataType dataType)
{
    constexpr float defaultTolerance = 1e-6f;

    switch (activationFunction)
    {
        // Float16 carries far less precision, so Elu and HardSwish need looser bounds.
        case ActivationFunction::Elu:
            return (dataType == DataType::Float16 ? 0.01f : 0.00001f);
        case ActivationFunction::HardSwish:
            return (dataType == DataType::Float16 ? 0.01f : defaultTolerance);
        default:
            return defaultTolerance;
    }
}
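/* Illustrative use (not from the original file): Float16 Elu comparisons accept
   up to 0.01 absolute error, while Float32 Elu tightens to 1e-5:

       float f16Tol = GetActivationTolerance(ActivationFunction::Elu, DataType::Float16); // 0.01f
       float f32Tol = GetActivationTolerance(ActivationFunction::Elu, DataType::Float32); // 0.00001f
*/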
/** Builds a one-layer network that applies the activation described by the descriptor. */
INetworkPtr CreateActivationNetwork(const TensorInfo& inputInfo,
                                    const TensorInfo& outputInfo,
                                    const ActivationDescriptor& descriptor)
{
    using namespace armnn;
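    // The construction below is a sketch: the binding id 0 and the layer names
    // "input"/"output" are assumptions; GetActivationFunctionAsCString gives the
    // activation layer a readable name.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input  = net->AddInputLayer(0, "input");
    IConnectableLayer* prelu  = net->AddActivationLayer(
        descriptor, GetActivationFunctionAsCString(descriptor.m_Function));
    IConnectableLayer* output = net->AddOutputLayer(0, "output");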
    // Wire the layers end to end: input -> activation -> output.
    Connect(input, prelu, inputInfo, 0, 0);
    Connect(prelu, output, outputInfo, 0, 0);

    return net;
}
/** Shared implementation: builds the network, quantizes the data, and runs it end to end. */
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void ActivationEndToEndImpl(const std::vector<armnn::BackendId>& backends,
                            const std::vector<float>& floatInputData,
                            const std::vector<float>& floatExpectedOutputData,
                            const armnn::TensorInfo& inputInfo,
                            const armnn::TensorInfo& outputInfo,
                            const armnn::ActivationDescriptor& descriptor)
{
    using namespace armnn;

    // Quantize the reference data into the tensors' data type T.
    std::vector<T> inputData = armnnUtils::QuantizedVector<T>(
        floatInputData, inputInfo.GetQuantizationScale(), inputInfo.GetQuantizationOffset());
    std::vector<T> expectedOutputData = armnnUtils::QuantizedVector<T>(
        floatExpectedOutputData, outputInfo.GetQuantizationScale(), outputInfo.GetQuantizationOffset());

    INetworkPtr net = CreateActivationNetwork(inputInfo, outputInfo, descriptor);

    std::map<int, std::vector<T>> inputTensorData = { { 0, inputData } };
    std::map<int, std::vector<T>> expectedOutputTensorData = { { 0, expectedOutputData } };

    float tolerance = GetActivationTolerance(descriptor.m_Function, ArmnnType);

    EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(net), inputTensorData,
                                                expectedOutputTensorData, backends, tolerance);
}
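/* Quantization example (illustrative): with DataType::QAsymmU8, scale 1.0f and
   offset 0, armnnUtils::QuantizedVector maps 2.0f to round(2.0f / 1.0f) + 0 = 2;
   a finer scale of 0.1f would map 2.0f to 20 instead. Float types pass through
   unchanged. */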
/** Runs an end-to-end Elu test with fixed input and expected output data. */
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void EluEndToEndTest(const std::vector<BackendId>& backends)
{
    std::vector<float> floatInputData{ -2.0f, -1.0f, -0.0f, 0.0f,
                                        1.0f,  2.0f,  3.0f, 4.0f };

    std::vector<float> floatExpectedOutputData{ -0.86466471676f, -0.63212055882f, -0.0f, 0.0f,
                                                 1.0f,            2.0f,            3.0f, 4.0f };
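    // With alpha = 1, Elu(x) = exp(x) - 1 for x < 0 and x for x >= 0, so
    // Elu(-2) = exp(-2) - 1 ~= -0.8646647 and Elu(-1) = exp(-1) - 1 ~= -0.6321206,
    // matching the expected values above.

    // Tensor and descriptor setup, sketched: the shape and the unit quantization
    // parameters are assumptions (any shape holding the eight values works).
    armnn::TensorInfo inputInfo({ 2, 2, 2, 1 }, ArmnnType, 1.0f, 0);
    armnn::TensorInfo outputInfo({ 2, 2, 2, 1 }, ArmnnType, 1.0f, 0);

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = armnn::ActivationFunction::Elu;
    descriptor.m_A        = 1.0f;   // alpha in a * (exp(x) - 1)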
    ActivationEndToEndImpl<ArmnnType>(backends, floatInputData, floatExpectedOutputData,
                                      inputInfo, outputInfo, descriptor);
}
/** Runs an end-to-end HardSwish test with fixed input and expected output data. */
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void HardSwishEndToEndTest(const std::vector<BackendId>& backends)
{
    std::vector<float> floatInputData{ -2.0f, -1.0f, -0.5f, 0.0f,
                                        1.0f,  2.0f,  3.0f, 4.0f };

    std::vector<float> floatExpectedOutputData{ -0.33333333333f, -0.33333333333f, -0.208333f, 0.0f,
                                                 0.66666666667f,  1.66666666667f,  3.0f,      4.0f };
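    // HardSwish(x) = x * ReLU6(x + 3) / 6, so HardSwish(-2) = -2 * 1/6 ~= -0.333333,
    // HardSwish(-0.5) = -0.5 * 2.5/6 ~= -0.208333, and HardSwish(1) = 1 * 4/6 ~= 0.666667,
    // matching the expected values above.

    // Tensor and descriptor setup, sketched as in EluEndToEndTest (shape and unit
    // quantization parameters are assumptions).
    armnn::TensorInfo inputInfo({ 2, 2, 2, 1 }, ArmnnType, 1.0f, 0);
    armnn::TensorInfo outputInfo({ 2, 2, 2, 1 }, ArmnnType, 1.0f, 0);

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = armnn::ActivationFunction::HardSwish;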
    ActivationEndToEndImpl<ArmnnType>(backends, floatInputData, floatExpectedOutputData,
                                      inputInfo, outputInfo, descriptor);
}
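/* Illustrative invocation (hypothetical test case; the name and backend choice
   are assumptions):

       TEST_CASE("EluEndToEndTestFloat32")
       {
           EluEndToEndTest<armnn::DataType::Float32>({ armnn::Compute::CpuRef });
       }
*/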