aboutsummaryrefslogtreecommitdiff
path: root/src/armnnDeserializer
diff options
context:
space:
mode:
Diffstat (limited to 'src/armnnDeserializer')
-rw-r--r--src/armnnDeserializer/Deserializer.cpp59
-rw-r--r--src/armnnDeserializer/Deserializer.hpp1
-rw-r--r--src/armnnDeserializer/test/DeserializeActivation.cpp178
3 files changed, 238 insertions, 0 deletions
diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index 56a6570eee..2462061190 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -170,6 +170,7 @@ Deserializer::Deserializer()
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
// register supported layers
+ m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
@@ -185,6 +186,8 @@ Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPt
switch(layerType)
{
+ case Layer::Layer_ActivationLayer:
+ return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
case Layer::Layer_AdditionLayer:
return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
case Layer::Layer_Convolution2dLayer:
@@ -238,6 +241,33 @@ armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
}
}
+// Maps a serialized ActivationFunction enum value onto its armnn equivalent.
+//
+// An unrecognised value (e.g. one written by a newer serializer schema) now
+// throws armnn::ParseException instead of silently falling back to Sigmoid:
+// the silent fallback would deserialize a *different* network than the one
+// that was saved, which is wrong output with no error and very hard to
+// diagnose downstream.
+armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
+{
+ switch (function)
+ {
+ case armnnSerializer::ActivationFunction_Sigmoid:
+ return armnn::ActivationFunction::Sigmoid;
+ case armnnSerializer::ActivationFunction_TanH:
+ return armnn::ActivationFunction::TanH;
+ case armnnSerializer::ActivationFunction_Linear:
+ return armnn::ActivationFunction::Linear;
+ case armnnSerializer::ActivationFunction_ReLu:
+ return armnn::ActivationFunction::ReLu;
+ case armnnSerializer::ActivationFunction_BoundedReLu:
+ return armnn::ActivationFunction::BoundedReLu;
+ case armnnSerializer::ActivationFunction_LeakyReLu:
+ return armnn::ActivationFunction::LeakyReLu;
+ case armnnSerializer::ActivationFunction_Abs:
+ return armnn::ActivationFunction::Abs;
+ case armnnSerializer::ActivationFunction_Sqrt:
+ return armnn::ActivationFunction::Sqrt;
+ case armnnSerializer::ActivationFunction_Square:
+ return armnn::ActivationFunction::Square;
+ default:
+ throw armnn::ParseException(
+ boost::str(boost::format("Unsupported activation function value %1%") %
+ static_cast<int>(function)));
+ }
+}
+
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
armnn::DataType type;
@@ -645,6 +675,35 @@ void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
slots.outputSlot = slot;
}
+// Deserializes the ActivationLayer stored at graph position layerIndex:
+// rebuilds its ActivationDescriptor from the flatbuffer, adds the layer to
+// m_Network, sets the output tensor info and registers its slots so later
+// passes can wire up the connections.
+void Deserializer::ParseActivation(unsigned int layerIndex)
+{
+ CHECK_LAYERS(m_Graph, 0, layerIndex);
+ auto inputs = GetInputs(m_Graph, layerIndex);
+ CHECK_LOCATION();
+ // Activation is a unary op: exactly one input and one output slot.
+ CHECK_VALID_SIZE(inputs.size(), 1);
+
+ auto outputs = GetOutputs(m_Graph, layerIndex);
+ CHECK_VALID_SIZE(outputs.size(), 1);
+
+ // The layer is named after its graph index; the serialized layerName is not used here.
+ auto layerName = boost::str(boost::format("Activation:%1%") % layerIndex);
+
+ auto serializerLayer = m_Graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
+ auto serializerDescriptor = serializerLayer->descriptor();
+
+ // Translate the flatbuffer descriptor into the armnn one. a/b are the
+ // function-specific parameters (e.g. the BoundedReLu bounds in the tests below).
+ armnn::ActivationDescriptor descriptor;
+ descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
+ descriptor.m_A = serializerDescriptor->a();
+ descriptor.m_B = serializerDescriptor->b();
+
+ IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
+ layerName.c_str());
+ armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
+ layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+
+ // Record this layer's slots so connections can be resolved once all layers exist.
+ RegisterInputSlots(layerIndex, layer);
+ RegisterOutputSlots(layerIndex, layer);
+}
+
void Deserializer::ParseAdd(unsigned int layerIndex)
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
diff --git a/src/armnnDeserializer/Deserializer.hpp b/src/armnnDeserializer/Deserializer.hpp
index a66508a158..bf78e10f40 100644
--- a/src/armnnDeserializer/Deserializer.hpp
+++ b/src/armnnDeserializer/Deserializer.hpp
@@ -68,6 +68,7 @@ private:
using LayerParsingFunction = void(Deserializer::*)(unsigned int layerIndex);
void ParseUnsupportedLayer(unsigned int layerIndex);
+ void ParseActivation(unsigned int layerIndex);
void ParseAdd(unsigned int layerIndex);
void ParseConvolution2d(unsigned int layerIndex);
void ParseDepthwiseConvolution2d(unsigned int layerIndex);
diff --git a/src/armnnDeserializer/test/DeserializeActivation.cpp b/src/armnnDeserializer/test/DeserializeActivation.cpp
new file mode 100644
index 0000000000..ad03dd662b
--- /dev/null
+++ b/src/armnnDeserializer/test/DeserializeActivation.cpp
@@ -0,0 +1,178 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include <boost/test/unit_test.hpp>
+#include "ParserFlatbuffersSerializeFixture.hpp"
+#include "../Deserializer.hpp"
+
+#include <string>
+#include <iostream>
+
+BOOST_AUTO_TEST_SUITE(DeserializeParser)
+
+// Test fixture that builds a minimal serialized network
+// (InputLayer -> ActivationLayer -> OutputLayer) as flatbuffers JSON and
+// deserializes it via ParserFlatbuffersSerializeFixture::Setup().
+// Tensor shapes, the data type and the activation descriptor fields
+// (function, a, b) are injected as strings so each concrete test case can
+// vary them without duplicating the JSON.
+struct ActivationFixture : public ParserFlatbuffersSerializeFixture
+{
+ explicit ActivationFixture(const std::string& inputShape,
+ const std::string& outputShape,
+ const std::string& dataType,
+ const std::string& activationType="Sigmoid",
+ const std::string& a = "0.0",
+ const std::string& b = "0.0")
+ {
+ // JSON mirrors the ArmnnSchema flatbuffer layout. It is runtime data,
+ // so no comments may be added inside the raw string itself.
+ m_JsonString = R"(
+ {
+ inputIds: [0],
+ outputIds: [2],
+ layers: [{
+ layer_type: "InputLayer",
+ layer: {
+ base: {
+ layerBindingId: 0,
+ base: {
+ index: 0,
+ layerName: "InputLayer",
+ layerType: "Input",
+ inputSlots: [{
+ index: 0,
+ connection: {sourceLayerIndex:0, outputSlotIndex:0 },
+ }],
+ outputSlots: [{
+ index: 0,
+ tensorInfo: {
+ dimensions: )" + inputShape + R"(,
+ dataType: )" + dataType + R"(
+ },
+ }],
+ },
+ }
+ },
+ },
+ {
+ layer_type: "ActivationLayer",
+ layer : {
+ base: {
+ index:1,
+ layerName: "ActivationLayer",
+ layerType: "Activation",
+ inputSlots: [{
+ index: 0,
+ connection: {sourceLayerIndex:0, outputSlotIndex:0 },
+ }],
+ outputSlots: [{
+ index: 0,
+ tensorInfo: {
+ dimensions: )" + outputShape + R"(,
+ dataType: )" + dataType + R"(
+ },
+ }],
+ },
+ descriptor: {
+ a: )" + a + R"(,
+ b: )" + b + R"(,
+ function: )" + activationType + R"(
+ },
+ },
+ },
+ {
+ layer_type: "OutputLayer",
+ layer: {
+ base:{
+ layerBindingId: 2,
+ base: {
+ index: 2,
+ layerName: "OutputLayer",
+ layerType: "Output",
+ inputSlots: [{
+ index: 0,
+ connection: {sourceLayerIndex:1, outputSlotIndex:0 },
+ }],
+ outputSlots: [{
+ index: 0,
+ tensorInfo: {
+ dimensions: )" + outputShape + R"(,
+ dataType: )" + dataType + R"(
+ },
+ }],
+ }
+ }
+ },
+ }]
+ }
+ )";
+ // Parses m_JsonString and builds the runtime network used by RunTest.
+ Setup();
+ }
+};
+
+// ReLu on quantised (QuantisedAsymm8) data; input and output shapes match.
+struct SimpleActivationFixture : ActivationFixture
+{
+ SimpleActivationFixture() : ActivationFixture("[1, 2, 2, 1]",
+ "[1, 2, 2, 1]",
+ "QuantisedAsymm8",
+ "ReLu") {}
+};
+
+// ReLu on Float32 data; input and output shapes match.
+struct SimpleActivationFixture2 : ActivationFixture
+{
+ SimpleActivationFixture2() : ActivationFixture("[1, 2, 2, 1]",
+ "[1, 2, 2, 1]",
+ "Float32",
+ "ReLu") {}
+};
+
+// BoundedReLu on quantised (QuantisedAsymm8) data with descriptor a=5.0 (upper
+// bound) and b=0.0 (lower bound).
+struct SimpleActivationFixture3 : ActivationFixture
+{
+ SimpleActivationFixture3() : ActivationFixture("[1, 2, 2, 1]",
+ "[1, 2, 2, 1]",
+ "QuantisedAsymm8",
+ "BoundedReLu",
+ "5.0",
+ "0.0") {}
+};
+
+// BoundedReLu on Float32 data with descriptor a=5.0 (upper bound) and
+// b=0.0 (lower bound).
+struct SimpleActivationFixture4 : ActivationFixture
+{
+ SimpleActivationFixture4() : ActivationFixture("[1, 2, 2, 1]",
+ "[1, 2, 2, 1]",
+ "Float32",
+ "BoundedReLu",
+ "5.0",
+ "0.0") {}
+};
+
+
+// ReLu over QuantisedAsymm8: all inputs here are non-negative raw values,
+// so the output equals the input unchanged.
+BOOST_FIXTURE_TEST_CASE(ActivationReluQuantisedAsymm8, SimpleActivationFixture)
+{
+ RunTest<4, armnn::DataType::QuantisedAsymm8>(
+ 0,
+ {{"InputLayer", {10, 0, 2, 0}}},
+ {{"OutputLayer", {10, 0, 2, 0}}});
+}
+
+// ReLu over Float32: the negative input (-85) clamps to 0, non-negative
+// values pass through unchanged.
+BOOST_FIXTURE_TEST_CASE(ActivationReluFloat32, SimpleActivationFixture2)
+{
+ RunTest<4, armnn::DataType::Float32>(
+ 0,
+ {{"InputLayer", {111, -85, 226, 3}}},
+ {{"OutputLayer", {111, 0, 226, 3}}});
+}
+
+
+// BoundedReLu over QuantisedAsymm8 with bounds [0, 5]: 10 clamps to the
+// upper bound 5; values already inside the range are unchanged.
+BOOST_FIXTURE_TEST_CASE(ActivationBoundedReluQuantisedAsymm8, SimpleActivationFixture3)
+{
+ RunTest<4, armnn::DataType::QuantisedAsymm8>(
+ 0,
+ {{"InputLayer", {10, 0, 2, 0}}},
+ {{"OutputLayer", {5, 0, 2, 0}}});
+}
+
+// BoundedReLu over Float32 with bounds [0, 5]: 111 and 226 clamp to 5,
+// -85 clamps to 0, and 3 is inside the range so it is unchanged.
+BOOST_FIXTURE_TEST_CASE(ActivationBoundedReluFloat32, SimpleActivationFixture4)
+{
+ RunTest<4, armnn::DataType::Float32>(
+ 0,
+ {{"InputLayer", {111, -85, 226, 3}}},
+ {{"OutputLayer", {5, 0, 5, 3}}});
+}
+
+BOOST_AUTO_TEST_SUITE_END()