author     Ellen Norris-Thompson <ellen.norris-thompson@arm.com>    2019-06-19 11:46:21 +0100
committer  Matteo Martincigh <matteo.martincigh@arm.com>            2019-06-19 16:27:26 +0000
commit     51982472bfedf12e7d82cde6614617f94b2c86d0 (patch)
tree       0e2bef4812e0a3128b47a81f393dd6583d45e4c7 /src/armnnDeserializer/Deserializer.cpp
parent     a4812b6cd54e4dc4903f457066281d8bf0ccf448 (diff)
IVGCVSW-3269 Add Serialization support for the new Prelu Activation layer
* Adds serialization/deserialization support
* Adds related unit test

Signed-off-by: Ellen Norris-Thompson <ellen.norris-thompson@arm.com>
Change-Id: I600322b03e51f443cbcd9262bb27e36e5fd95ae5
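For context on how the new support is exercised end to end, the sketch below builds a small network containing a Prelu layer, serializes it, and feeds the bytes back through the deserializer. This is a minimal illustration in the spirit of the related unit test, not code from this change: the public entry points used here (ISerializer::Create/Serialize/SaveSerializedToStream and IDeserializer::Create/CreateNetworkFromBinary) and the example tensor shapes are assumptions about the release this commit targets.

    // Minimal round-trip sketch (assumed public API, not part of this commit).
    #include <armnn/ArmNN.hpp>
    #include <armnnSerializer/ISerializer.hpp>
    #include <armnnDeserializer/IDeserializer.hpp>

    #include <cstdint>
    #include <sstream>
    #include <string>
    #include <vector>

    int main()
    {
        using namespace armnn;

        // A Prelu layer has two inputs: the data tensor and the alpha (slope) tensor.
        TensorInfo inputInfo (TensorShape({1, 2, 2, 3}), DataType::Float32);
        TensorInfo alphaInfo (TensorShape({1, 1, 1, 3}), DataType::Float32);
        TensorInfo outputInfo(TensorShape({1, 2, 2, 3}), DataType::Float32);

        INetworkPtr network = INetwork::Create();
        IConnectableLayer* input  = network->AddInputLayer(0, "input");
        IConnectableLayer* alpha  = network->AddInputLayer(1, "alpha");
        IConnectableLayer* prelu  = network->AddPreluLayer("prelu");
        IConnectableLayer* output = network->AddOutputLayer(0, "output");

        input->GetOutputSlot(0).Connect(prelu->GetInputSlot(0));
        alpha->GetOutputSlot(0).Connect(prelu->GetInputSlot(1));
        prelu->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        input->GetOutputSlot(0).SetTensorInfo(inputInfo);
        alpha->GetOutputSlot(0).SetTensorInfo(alphaInfo);
        prelu->GetOutputSlot(0).SetTensorInfo(outputInfo);

        // Serialize the network to a byte stream ...
        auto serializer = armnnSerializer::ISerializer::Create();
        serializer->Serialize(*network);
        std::stringstream stream;
        serializer->SaveSerializedToStream(stream);

        // ... and hand the bytes back to the deserializer, which dispatches to ParsePrelu.
        std::string bytes = stream.str();
        std::vector<uint8_t> binary(bytes.begin(), bytes.end());
        auto deserializer = armnnDeserializer::IDeserializer::Create();
        INetworkPtr restored = deserializer->CreateNetworkFromBinary(binary);

        return restored ? 0 : 1;
    }

The alpha values travel on the layer's second input slot rather than as descriptor parameters, which is consistent with the deserializer change below accepting exactly two inputs and no layer-specific descriptor.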
Diffstat (limited to 'src/armnnDeserializer/Deserializer.cpp')
-rw-r--r--  src/armnnDeserializer/Deserializer.cpp | 23 +++++++++++++++++++++++
1 file changed, 23 insertions(+), 0 deletions(-)
diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index 68f3e8d566..b23ed97b4c 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -213,6 +213,7 @@ m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
+ m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
@@ -292,6 +293,8 @@ Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPt
return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
case Layer::Layer_Pooling2dLayer:
return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
+ case Layer::Layer_PreluLayer:
+ return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
case Layer::Layer_QuantizeLayer:
return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
case Layer::Layer_ReshapeLayer:
@@ -2194,4 +2197,24 @@ void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
RegisterOutputSlots(graph, layerIndex, layer);
}
+void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
+{
+ CHECK_LAYERS(graph, 0, layerIndex);
+ auto inputs = GetInputs(graph, layerIndex);
+ CHECK_LOCATION();
+ CHECK_VALID_SIZE(inputs.size(), 2);
+
+ auto outputs = GetOutputs(graph, layerIndex);
+ CHECK_VALID_SIZE(outputs.size(), 1);
+
+ auto layerName = GetLayerName(graph, layerIndex);
+ IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
+
+ armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
+ layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+
+ RegisterInputSlots(graph, layerIndex, layer);
+ RegisterOutputSlots(graph, layerIndex, layer);
+}
+
} // namespace armnnDeserializer
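For readers unfamiliar with the deserializer helpers, the added handler can be read as the annotated copy below. The comments are inferred from how the surrounding Parse* handlers use the same helpers and from the Prelu layer's input layout elsewhere in Arm NN; they are explanatory only and are not part of the committed code.

    // Annotated restatement of the added ParsePrelu handler (comments are assumptions).
    void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
    {
        CHECK_LAYERS(graph, 0, layerIndex);           // layer index must be valid for this graph
        auto inputs = GetInputs(graph, layerIndex);   // serialized input connections
        CHECK_LOCATION();
        CHECK_VALID_SIZE(inputs.size(), 2);           // slot 0: data tensor, slot 1: alpha (slope) tensor

        auto outputs = GetOutputs(graph, layerIndex);
        CHECK_VALID_SIZE(outputs.size(), 1);          // Prelu produces a single output tensor

        auto layerName = GetLayerName(graph, layerIndex);
        IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());  // recreate the layer in the INetwork

        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
        layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);  // restore the recorded output TensorInfo

        RegisterInputSlots(graph, layerIndex, layer);   // rebind the two serialized inputs by slot index
        RegisterOutputSlots(graph, layerIndex, layer);  // expose the output for downstream layers
    }

Because Prelu carries no descriptor of its own, the handler only has to recreate the layer, restore the output tensor info, and rewire the serialized connections; the parameter-free shape of the function matches the table registrations added in the first two hunks.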