author    Saoirse Stewart <saoirse.stewart@arm.com>    2019-02-19 15:54:14 +0000
committer Saoirse Stewart Arm <saoirse.stewart@arm.com>    2019-02-19 15:55:46 +0000
commit    263829c2163d79a28f98f24f9dd1e52e1c3cbbef (patch)
tree      bd904ce4b8aeaa14bc0622bbacefda26011733f2 /src/armnnSerializer
parent    4fbae33571871ce584e421657e8ffba299e89d67 (diff)
download  armnn-263829c2163d79a28f98f24f9dd1e52e1c3cbbef.tar.gz
IVGCVSW-2642 Add Reshape to Serializer and Deserializer
Change-Id: Iccded3c6e3c0713c44f43231981440420591f94e
Signed-off-by: Saoirse Stewart <saoirse.stewart@arm.com>
Signed-off-by: Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>
Diffstat (limited to 'src/armnnSerializer')
-rw-r--r--  src/armnnSerializer/Schema.fbs                 | 13
-rw-r--r--  src/armnnSerializer/Serializer.cpp             | 26
-rw-r--r--  src/armnnSerializer/Serializer.hpp             |  4
-rw-r--r--  src/armnnSerializer/SerializerSupport.md       |  1
-rw-r--r--  src/armnnSerializer/test/SerializerTests.cpp   | 61
5 files changed, 103 insertions, 2 deletions
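
For orientation, here is a condensed sketch of what the new Reshape support enables end to end. It mirrors the SimpleReshapeIntegration test added below (the shapes, layer name and armnnSerializer::Serializer calls are taken from that test); the wrapper function and include choices are illustrative only, not part of this change.

// Minimal sketch: build a network with a Reshape layer reshaping (1, 9) -> (3, 3)
// and serialize it to a stream.
#include <armnn/ArmNN.hpp>
#include <ostream>
// #include "Serializer.hpp" as in the unit tests; the exact path depends on the build setup.

void SerializeReshapeNetwork(std::ostream& stream)
{
    armnn::ReshapeDescriptor reshapeDescriptor;
    reshapeDescriptor.m_TargetShape = armnn::TensorShape({3, 3});

    armnn::INetworkPtr network = armnn::INetwork::Create();
    armnn::IConnectableLayer* const inputLayer   = network->AddInputLayer(0);
    armnn::IConnectableLayer* const reshapeLayer = network->AddReshapeLayer(reshapeDescriptor, "ReshapeLayer");
    armnn::IConnectableLayer* const outputLayer  = network->AddOutputLayer(0);

    inputLayer->GetOutputSlot(0).Connect(reshapeLayer->GetInputSlot(0));
    inputLayer->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo({1, 9}, armnn::DataType::Float32));
    reshapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
    reshapeLayer->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo({3, 3}, armnn::DataType::Float32));

    // Serialize the network, as done in the test case below.
    armnnSerializer::Serializer serializer;
    serializer.Serialize(*network);
    serializer.SaveSerializedToStream(stream);
}
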
diff --git a/src/armnnSerializer/Schema.fbs b/src/armnnSerializer/Schema.fbs
index 048181a2b1..2b96ad8de1 100644
--- a/src/armnnSerializer/Schema.fbs
+++ b/src/armnnSerializer/Schema.fbs
@@ -73,7 +73,8 @@ enum LayerType : uint {
Multiplication = 2,
Output = 3,
Pooling2d = 4,
- Softmax = 5
+ Reshape = 5,
+ Softmax = 6
}
// Base layer table to be used as part of other layers
@@ -152,12 +153,22 @@ table OutputLayer {
base:BindableLayerBase;
}
+table ReshapeLayer {
+ base:LayerBase;
+ descriptor:ReshapeDescriptor;
+}
+
+table ReshapeDescriptor {
+ targetShape:[uint];
+}
+
union Layer {
AdditionLayer,
InputLayer,
MultiplicationLayer,
OutputLayer,
Pooling2dLayer,
+ ReshapeLayer,
SoftmaxLayer
}
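
The commit title also covers the Deserializer, whose sources live outside src/armnnSerializer and therefore do not appear in this diff. As a rough sketch, the new tables could be mapped back to an armnn::ReshapeDescriptor along the lines below; the accessor names (descriptor(), targetShape()), the armnnSerializer namespace for the generated types and the generated header name follow standard flatc C++ codegen conventions and are assumptions here, not code from this change.

// Sketch only: rebuild an armnn::ReshapeDescriptor from a serialized ReshapeLayer table.
#include <armnn/Descriptors.hpp>
#include <armnn/Tensor.hpp>
#include <vector>
// #include "ArmnnSchema_generated.h"   // assumed name of the flatc-generated header

armnn::ReshapeDescriptor ToReshapeDescriptor(const armnnSerializer::ReshapeLayer* layer)
{
    const auto* shape = layer->descriptor()->targetShape(); // flatbuffers vector of uint
    std::vector<unsigned int> dims(shape->begin(), shape->end());

    armnn::ReshapeDescriptor descriptor;
    descriptor.m_TargetShape = armnn::TensorShape(static_cast<unsigned int>(dims.size()), dims.data());
    return descriptor;
}
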
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 57228c406e..b229ae7e3f 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -49,7 +49,6 @@ void SerializerVisitor::VisitInputLayer(const IConnectableLayer* layer, LayerBin
auto flatBufferInputBindableBaseLayer = serializer::CreateBindableLayerBase(m_flatBufferBuilder,
flatBufferInputBaseLayer,
id);
-
// Push layer Guid to outputIds.
m_inputIds.push_back(GetSerializedId(layer->GetGuid()));
@@ -106,6 +105,31 @@ void SerializerVisitor::VisitMultiplicationLayer(const IConnectableLayer* layer,
CreateAnyLayer(flatBufferMultiplicationLayer.o, serializer::Layer::Layer_MultiplicationLayer);
}
+// Build FlatBuffer for Reshape Layer
+void SerializerVisitor::VisitReshapeLayer(const IConnectableLayer* layer,
+ const armnn::ReshapeDescriptor& reshapeDescriptor,
+ const char* name)
+{
+ // Create FlatBuffer BaseLayer
+ auto flatBufferReshapeBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Reshape);
+
+ std::vector<unsigned int> targetShape;
+ for (unsigned int i =0; i < reshapeDescriptor.m_TargetShape.GetNumDimensions(); i++)
+ {
+ targetShape.push_back(reshapeDescriptor.m_TargetShape[i]);
+ }
+
+ auto flatBufferReshapeDesc = serializer::CreateReshapeDescriptor(m_flatBufferBuilder,
+ m_flatBufferBuilder.CreateVector(targetShape));
+
+ // Create the FlatBuffer ReshapeLayer
+ auto flatBufferReshapeLayer = serializer::CreateReshapeLayer(m_flatBufferBuilder, flatBufferReshapeBaseLayer,
+ flatBufferReshapeDesc);
+
+ // Add the AnyLayer to the FlatBufferLayers
+ CreateAnyLayer(flatBufferReshapeLayer.o, serializer::Layer::Layer_ReshapeLayer);
+}
+
// Build FlatBuffer for Softmax Layer
void SerializerVisitor::VisitSoftmaxLayer(const IConnectableLayer* layer,
const SoftmaxDescriptor& softmaxDescriptor,
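
The new VisitReshapeLayer override is only reached when the graph is walked with the SerializerVisitor. A rough sketch of that dispatch is below, assuming the INetwork::Accept(ILayerVisitor&) entry point used by Serializer::Serialize; neither appears in this diff, so treat the call as an assumption.

// Sketch of the dispatch path (assumptions as noted above).
armnnSerializer::SerializerVisitor visitor;
network->Accept(visitor); // invokes visitor.VisitReshapeLayer(...) for each Reshape layer in 'network'
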
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index 169ed09cdd..e4485f5856 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -64,6 +64,10 @@ public:
const armnn::Pooling2dDescriptor& pooling2dDescriptor,
const char* name = nullptr) override;
+ void VisitReshapeLayer(const armnn::IConnectableLayer* layer,
+ const armnn::ReshapeDescriptor& reshapeDescriptor,
+ const char* name = nullptr) override;
+
private:
/// Creates the Input Slots and Output Slots and LayerBase for the layer.
diff --git a/src/armnnSerializer/SerializerSupport.md b/src/armnnSerializer/SerializerSupport.md
index a94e0ad151..c238543b22 100644
--- a/src/armnnSerializer/SerializerSupport.md
+++ b/src/armnnSerializer/SerializerSupport.md
@@ -8,6 +8,7 @@ The Arm NN SDK Serializer currently supports the following layers:
* Addition
* Multiplication
+* Reshape
* Softmax
* Pooling2d
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index 4b6bf1ec53..77bf78683a 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -91,6 +91,67 @@ BOOST_AUTO_TEST_CASE(SimpleNetworkWithMultiplicationSerialization)
BOOST_TEST(stream.str().find(multLayerName) != stream.str().npos);
}
+BOOST_AUTO_TEST_CASE(SimpleReshapeIntegration)
+{
+ armnn::NetworkId networkIdentifier;
+ armnn::IRuntime::CreationOptions options; // default options
+ armnn::IRuntimePtr run = armnn::IRuntime::Create(options);
+
+ unsigned int inputShape[] = {1, 9};
+ unsigned int outputShape[] = {3, 3};
+
+ auto inputTensorInfo = armnn::TensorInfo(2, inputShape, armnn::DataType::Float32);
+ auto outputTensorInfo = armnn::TensorInfo(2, outputShape, armnn::DataType::Float32);
+ auto reshapeOutputTensorInfo = armnn::TensorInfo(2, outputShape, armnn::DataType::Float32);
+
+ armnn::ReshapeDescriptor reshapeDescriptor;
+ reshapeDescriptor.m_TargetShape = reshapeOutputTensorInfo.GetShape();
+
+ armnn::INetworkPtr network = armnn::INetwork::Create();
+ armnn::IConnectableLayer *const inputLayer = network->AddInputLayer(0);
+ armnn::IConnectableLayer *const reshapeLayer = network->AddReshapeLayer(reshapeDescriptor, "ReshapeLayer");
+ armnn::IConnectableLayer *const outputLayer = network->AddOutputLayer(0);
+
+ inputLayer->GetOutputSlot(0).Connect(reshapeLayer->GetInputSlot(0));
+ inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
+ reshapeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+ reshapeLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+
+ armnnSerializer::Serializer serializer;
+ serializer.Serialize(*network);
+ std::stringstream stream;
+ serializer.SaveSerializedToStream(stream);
+ std::string const serializerString{stream.str()};
+
+ //Deserialize network.
+ auto deserializedNetwork = DeserializeNetwork(serializerString);
+
+ //Optimize the deserialized network
+ auto deserializedOptimized = Optimize(*deserializedNetwork, {armnn::Compute::CpuRef},
+ run->GetDeviceSpec());
+
+ // Load graph into runtime
+ run->LoadNetwork(networkIdentifier, std::move(deserializedOptimized));
+
+ std::vector<float> input1Data(inputTensorInfo.GetNumElements());
+ std::iota(input1Data.begin(), input1Data.end(), 8);
+
+ armnn::InputTensors inputTensors
+ {
+ {0, armnn::ConstTensor(run->GetInputTensorInfo(networkIdentifier, 0), input1Data.data())}
+ };
+
+ std::vector<float> outputData(input1Data.size());
+ armnn::OutputTensors outputTensors
+ {
+ {0,armnn::Tensor(run->GetOutputTensorInfo(networkIdentifier, 0), outputData.data())}
+ };
+
+ run->EnqueueWorkload(networkIdentifier, inputTensors, outputTensors);
+
+ BOOST_CHECK_EQUAL_COLLECTIONS(outputData.begin(),outputData.end(), input1Data.begin(),input1Data.end());
+}
+
BOOST_AUTO_TEST_CASE(SimpleSoftmaxIntegration)
{
armnn::TensorInfo tensorInfo({1, 10}, armnn::DataType::Float32);