Diffstat (limited to 'src/armnnSerializer')
-rw-r--r--  src/armnnSerializer/ArmnnSchema.fbs            10
-rw-r--r--  src/armnnSerializer/Serializer.cpp              9
-rw-r--r--  src/armnnSerializer/SerializerSupport.md        1
-rw-r--r--  src/armnnSerializer/test/SerializerTests.cpp   43
4 files changed, 60 insertions, 3 deletions
diff --git a/src/armnnSerializer/ArmnnSchema.fbs b/src/armnnSerializer/ArmnnSchema.fbs
index 1204ad8f0f..db5672f948 100644
--- a/src/armnnSerializer/ArmnnSchema.fbs
+++ b/src/armnnSerializer/ArmnnSchema.fbs
@@ -122,7 +122,8 @@ enum LayerType : uint {
Merge = 37,
Switch = 38,
Concat = 39,
- SpaceToDepth = 40
+ SpaceToDepth = 40,
+ Prelu = 41
}
// Base layer table to be used as part of other layers
@@ -555,6 +556,10 @@ table SwitchLayer {
base:LayerBase;
}
+table PreluLayer {
+ base:LayerBase;
+}
+
union Layer {
ActivationLayer,
AdditionLayer,
@@ -596,7 +601,8 @@ union Layer {
MergeLayer,
SwitchLayer,
ConcatLayer,
- SpaceToDepthLayer
+ SpaceToDepthLayer,
+ PreluLayer
}
table AnyLayer {
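
The schema change pairs the new LayerType enum value (Prelu = 41) with a matching PreluLayer table and Layer union member; the serialized layer carries only the common LayerBase fields (index, name, input/output slots), since PReLU has no descriptor parameters of its own. Below is a minimal sketch of how the regenerated FlatBuffers C++ code could read the new union member back. The layer_type() / layer_as_PreluLayer() accessors follow the usual FlatBuffers codegen naming convention, and the header name and function below are illustrative assumptions, not part of this patch.

    #include <ArmnnSchema_generated.h>   // assumed name of the FlatBuffers-generated header

    void ReadAnyLayer(const armnnSerializer::AnyLayer* anyLayer)
    {
        // Dispatch on the union tag; Layer_PreluLayer is the value added by this patch.
        if (anyLayer->layer_type() == armnnSerializer::Layer_PreluLayer)
        {
            const armnnSerializer::PreluLayer* preluLayer = anyLayer->layer_as_PreluLayer();

            // PreluLayer only carries the common base: layer index, name and
            // the input/output slot descriptions.
            const armnnSerializer::LayerBase* base = preluLayer->base();
            (void)base;
        }
    }
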
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 012ed666f1..81231e4eba 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -712,7 +712,14 @@ void SerializerVisitor::VisitPooling2dLayer(const armnn::IConnectableLayer* laye
void SerializerVisitor::VisitPreluLayer(const armnn::IConnectableLayer* layer,
const char* name)
{
- throw UnimplementedException("SerializerVisitor::VisitPreluLayer not yet implemented");
+ // Create FlatBuffer BaseLayer
+ auto flatBufferPreluBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Prelu);
+
+ // Create the FlatBuffer PreluLayer
+ auto flatBufferPreluLayer = serializer::CreatePreluLayer(m_flatBufferBuilder, flatBufferPreluBaseLayer);
+
+ // Add the AnyLayer to the FlatBufferLayers
+ CreateAnyLayer(flatBufferPreluLayer.o, serializer::Layer::Layer_PreluLayer);
}
void SerializerVisitor::VisitQuantizeLayer(const armnn::IConnectableLayer *layer, const char *name)
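
VisitPreluLayer now follows the same three-step pattern as the other layer visitors: build the LayerBase, wrap it in the layer-specific table, then register it through CreateAnyLayer with the matching union tag. The following is a minimal end-to-end sketch of serializing a network that contains a Prelu layer, assuming the public armnnSerializer::ISerializer interface (Create / Serialize / SaveSerializedToStream) that the tests below exercise through their SerializeNetwork helper; the header paths, function name and tensor shapes are illustrative.

    #include <armnn/INetwork.hpp>
    #include <armnnSerializer/ISerializer.hpp>   // assumed public header path

    #include <sstream>
    #include <string>

    std::string SerializePreluNetwork()
    {
        armnn::INetworkPtr network = armnn::INetwork::Create();

        // PReLU takes two inputs: the data tensor and a broadcastable alpha tensor.
        armnn::IConnectableLayer* input  = network->AddInputLayer(0);
        armnn::IConnectableLayer* alpha  = network->AddInputLayer(1);
        armnn::IConnectableLayer* prelu  = network->AddPreluLayer("prelu");
        armnn::IConnectableLayer* output = network->AddOutputLayer(0);

        input->GetOutputSlot(0).Connect(prelu->GetInputSlot(0));
        alpha->GetOutputSlot(0).Connect(prelu->GetInputSlot(1));
        prelu->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        armnn::TensorInfo inputInfo ({ 4, 1, 2 },    armnn::DataType::Float32);
        armnn::TensorInfo alphaInfo ({ 5, 4, 3, 1 }, armnn::DataType::Float32);
        armnn::TensorInfo outputInfo({ 5, 4, 3, 2 }, armnn::DataType::Float32);

        input->GetOutputSlot(0).SetTensorInfo(inputInfo);
        alpha->GetOutputSlot(0).SetTensorInfo(alphaInfo);
        prelu->GetOutputSlot(0).SetTensorInfo(outputInfo);

        // Serialize to FlatBuffers and return the raw bytes.
        armnnSerializer::ISerializerPtr serializer = armnnSerializer::ISerializer::Create();
        serializer->Serialize(*network);

        std::stringstream stream;
        serializer->SaveSerializedToStream(stream);
        return stream.str();
    }
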
diff --git a/src/armnnSerializer/SerializerSupport.md b/src/armnnSerializer/SerializerSupport.md
index 865cdf0610..e19eb32639 100644
--- a/src/armnnSerializer/SerializerSupport.md
+++ b/src/armnnSerializer/SerializerSupport.md
@@ -33,6 +33,7 @@ The Arm NN SDK Serializer currently supports the following layers:
* Pad
* Permute
* Pooling2d
+* Prelu
* Quantize
* Reshape
* ResizeBilinear
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index a757e16436..812a4780f4 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -1499,6 +1499,49 @@ BOOST_AUTO_TEST_CASE(SerializeMultiplication)
deserializedNetwork->Accept(verifier);
}
+BOOST_AUTO_TEST_CASE(SerializePrelu)
+{
+ class PreluLayerVerifier : public LayerVerifierBase
+ {
+ public:
+ PreluLayerVerifier(const std::string& layerName,
+ const std::vector<armnn::TensorInfo>& inputInfos,
+ const std::vector<armnn::TensorInfo>& outputInfos)
+ : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
+
+ void VisitPreluLayer(const armnn::IConnectableLayer* layer, const char* name) override
+ {
+ VerifyNameAndConnections(layer, name);
+ }
+ };
+
+ const std::string layerName("prelu");
+
+ armnn::TensorInfo inputTensorInfo ({ 4, 1, 2 }, armnn::DataType::Float32);
+ armnn::TensorInfo alphaTensorInfo ({ 5, 4, 3, 1 }, armnn::DataType::Float32);
+ armnn::TensorInfo outputTensorInfo({ 5, 4, 3, 2 }, armnn::DataType::Float32);
+
+ armnn::INetworkPtr network = armnn::INetwork::Create();
+ armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+ armnn::IConnectableLayer* const alphaLayer = network->AddInputLayer(1);
+ armnn::IConnectableLayer* const preluLayer = network->AddPreluLayer(layerName.c_str());
+ armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
+
+ inputLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(0));
+ alphaLayer->GetOutputSlot(0).Connect(preluLayer->GetInputSlot(1));
+ preluLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+ inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
+ alphaLayer->GetOutputSlot(0).SetTensorInfo(alphaTensorInfo);
+ preluLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+
+ armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+ BOOST_CHECK(deserializedNetwork);
+
+ PreluLayerVerifier verifier(layerName, {inputTensorInfo, alphaTensorInfo}, {outputTensorInfo});
+ deserializedNetwork->Accept(verifier);
+}
+
BOOST_AUTO_TEST_CASE(SerializeNormalization)
{
class NormalizationLayerVerifier : public LayerVerifierBase