author    Derek Lamberti <derek.lamberti@arm.com>  2019-02-20 13:57:42 +0000
committer Derek Lamberti <derek.lamberti@arm.com>  2019-02-20 15:41:32 +0000
commit    0028d1b0ce5f4c2c6a6eb3c66f38111c21eb47a3 (patch)
tree      894d7ac05ef1610bad636e24489248e6c472b313
parent    be25fc18f7e55bc39f537601e42a9d5c9d0c111f (diff)
download  armnn-0028d1b0ce5f4c2c6a6eb3c66f38111c21eb47a3.tar.gz
IVGCVSW-2736 Rename DeserializeParser => Deserializer & fix namespaces
Change-Id: I4166c0bbb5ba7f8b8884e71134c21f43d1fc27b0
Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
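For consumers of the library, the change amounts to a rename of the include path, the namespace and the interface types; roughly, a call site moves from the old names to the new ones as in the following sketch (an illustrative snippet, not part of the patch; the helper function name is made up):

    // Old names (before this patch):
    //   #include <armnnDeserializeParser/IDeserializeParser.hpp>
    //   auto parser = armnnDeserializeParser::IDeserializeParser::Create();

    // New names (after this patch):
    #include <armnnDeserializer/IDeserializer.hpp>

    #include <vector>

    armnn::INetworkPtr LoadNetwork(const std::vector<uint8_t>& binaryContent)
    {
        // IDeserializerPtr is a std::unique_ptr whose deleter calls IDeserializer::Destroy.
        armnnDeserializer::IDeserializerPtr deserializer = armnnDeserializer::IDeserializer::Create();
        return deserializer->CreateNetworkFromBinary(binaryContent);
    }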
-rw-r--r-- CMakeLists.txt | 22
-rw-r--r-- include/armnnDeserializer/IDeserializer.hpp (renamed from include/armnnDeserializeParser/IDeserializeParser.hpp) | 19
-rw-r--r-- src/armnnDeserializeParser/README.md | 7
-rw-r--r-- src/armnnDeserializer/Deserializer.cpp (renamed from src/armnnDeserializeParser/DeserializeParser.cpp) | 120
-rw-r--r-- src/armnnDeserializer/Deserializer.hpp (renamed from src/armnnDeserializeParser/DeserializeParser.hpp) | 30
-rw-r--r-- src/armnnDeserializer/DeserializerSupport.md (renamed from src/armnnDeserializeParser/DeserializerSupport.md) | 0
-rw-r--r-- src/armnnDeserializer/README.md | 7
-rw-r--r-- src/armnnDeserializer/test/DeserializeAdd.cpp (renamed from src/armnnDeserializeParser/test/DeserializeAdd.cpp) | 4
-rw-r--r-- src/armnnDeserializer/test/DeserializeConvolution2d.cpp (renamed from src/armnnDeserializeParser/test/DeserializeConvolution2d.cpp) | 4
-rw-r--r-- src/armnnDeserializer/test/DeserializeMultiplication.cpp (renamed from src/armnnDeserializeParser/test/DeserializeMultiplication.cpp) | 4
-rw-r--r-- src/armnnDeserializer/test/DeserializePooling2d.cpp (renamed from src/armnnDeserializeParser/test/DeserializePooling2d.cpp) | 4
-rw-r--r-- src/armnnDeserializer/test/DeserializeReshape.cpp (renamed from src/armnnDeserializeParser/test/DeserializeReshape.cpp) | 4
-rw-r--r-- src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp (renamed from src/armnnDeserializeParser/test/ParserFlatbuffersSerializeFixture.hpp) | 12
-rw-r--r-- src/armnnDeserializer/test/SchemaSerialize.hpp (renamed from src/armnnDeserializeParser/test/SchemaSerialize.hpp) | 0
-rw-r--r-- src/armnnDeserializer/test/SchemaSerialize.s (renamed from src/armnnDeserializeParser/test/SchemaSerialize.s) | 0
-rw-r--r-- src/armnnSerializer/Schema.fbs | 2
-rw-r--r-- src/armnnSerializer/Serializer.cpp | 65
-rw-r--r-- src/armnnSerializer/Serializer.hpp | 16
-rw-r--r-- src/armnnSerializer/SerializerUtils.cpp | 89
-rw-r--r-- src/armnnSerializer/SerializerUtils.hpp | 12
-rw-r--r-- src/armnnSerializer/test/SerializerTests.cpp | 6
-rw-r--r-- tests/ExecuteNetwork/ExecuteNetwork.cpp | 4
-rw-r--r-- tests/InferenceModel.hpp | 6
23 files changed, 218 insertions, 219 deletions
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 68d87afb36..3ae971b694 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -159,14 +159,14 @@ if(BUILD_ARMNN_SERIALIZER)
set(armnn_serializer_sources)
list(APPEND armnn_serializer_sources
include/armnnSerializer/ISerializer.hpp
- include/armnnDeserializeParser/IDeserializeParser.hpp
+ include/armnnDeserializer/IDeserializer.hpp
src/armnnSerializer/Schema_generated.h
src/armnnSerializer/Serializer.hpp
src/armnnSerializer/Serializer.cpp
src/armnnSerializer/SerializerUtils.hpp
src/armnnSerializer/SerializerUtils.cpp
- src/armnnDeserializeParser/DeserializeParser.hpp
- src/armnnDeserializeParser/DeserializeParser.cpp
+ src/armnnDeserializer/Deserializer.hpp
+ src/armnnDeserializer/Deserializer.cpp
)
add_library_ex(armnnSerializer SHARED ${armnn_serializer_sources})
@@ -554,15 +554,15 @@ if(BUILD_UNIT_TESTS)
list(APPEND unittest_sources
src/armnnSerializer/Schema_generated.h
src/armnnSerializer/test/SerializerTests.cpp
- src/armnnDeserializeParser/test/DeserializeAdd.cpp
- src/armnnDeserializeParser/test/DeserializeConvolution2d.cpp
- src/armnnDeserializeParser/test/DeserializeMultiplication.cpp
- src/armnnDeserializeParser/test/DeserializePooling2d.cpp
- src/armnnDeserializeParser/test/DeserializeReshape.cpp
- src/armnnDeserializeParser/test/ParserFlatbuffersSerializeFixture.hpp
- src/armnnDeserializeParser/test/SchemaSerialize.s
+ src/armnnDeserializer/test/DeserializeAdd.cpp
+ src/armnnDeserializer/test/DeserializeConvolution2d.cpp
+ src/armnnDeserializer/test/DeserializeMultiplication.cpp
+ src/armnnDeserializer/test/DeserializePooling2d.cpp
+ src/armnnDeserializer/test/DeserializeReshape.cpp
+ src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp
+ src/armnnDeserializer/test/SchemaSerialize.s
)
- set_source_files_properties(src/armnnDeserializeParser/test/SchemaSerialize.s PROPERTIES COMPILE_FLAGS "-x assembler-with-cpp")
+ set_source_files_properties(src/armnnDeserializer/test/SchemaSerialize.s PROPERTIES COMPILE_FLAGS "-x assembler-with-cpp")
endif()
if(BUILD_ONNX_PARSER)
diff --git a/include/armnnDeserializeParser/IDeserializeParser.hpp b/include/armnnDeserializer/IDeserializer.hpp
index ab64dc9e14..11e098a523 100644
--- a/include/armnnDeserializeParser/IDeserializeParser.hpp
+++ b/include/armnnDeserializer/IDeserializer.hpp
@@ -13,20 +13,19 @@
#include <map>
#include <vector>
-namespace armnnDeserializeParser
+namespace armnnDeserializer
{
-
using BindingPointInfo = std::pair<armnn::LayerBindingId, armnn::TensorInfo>;
-class IDeserializeParser;
-using IDeserializeParserPtr = std::unique_ptr<IDeserializeParser, void(*)(IDeserializeParser* parser)>;
+class IDeserializer;
+using IDeserializerPtr = std::unique_ptr<IDeserializer, void(*)(IDeserializer* parser)>;
-class IDeserializeParser
+class IDeserializer
{
public:
- static IDeserializeParser* CreateRaw();
- static IDeserializeParserPtr Create();
- static void Destroy(IDeserializeParser* parser);
+ static IDeserializer* CreateRaw();
+ static IDeserializerPtr Create();
+ static void Destroy(IDeserializer* parser);
/// Create an input network from binary file contents
virtual armnn::INetworkPtr CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent) = 0;
@@ -45,7 +44,7 @@ public:
const std::string& name) const = 0;
protected:
- virtual ~IDeserializeParser() {};
+ virtual ~IDeserializer() {};
};
-}
\ No newline at end of file
+} //namespace armnnDeserializer
\ No newline at end of file
diff --git a/src/armnnDeserializeParser/README.md b/src/armnnDeserializeParser/README.md
deleted file mode 100644
index 56eca53249..0000000000
--- a/src/armnnDeserializeParser/README.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# The Arm NN Deserialize parser
-
-The `armnnDeserializeParser` is a library for loading neural networks defined by Arm NN FlatBuffers files
-into the Arm NN runtime.
-
-For more information about the layers that are supported, and the networks that have been tested,
-see [DeserializeSupport.md](./DeserializeSupport.md)
\ No newline at end of file
diff --git a/src/armnnDeserializeParser/DeserializeParser.cpp b/src/armnnDeserializer/Deserializer.cpp
index 9b6b5b9473..56a6570eee 100644
--- a/src/armnnDeserializeParser/DeserializeParser.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -3,7 +3,7 @@
// SPDX-License-Identifier: MIT
//
-#include "DeserializeParser.hpp"
+#include "Deserializer.hpp"
#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>
@@ -29,9 +29,9 @@
using armnn::ParseException;
using namespace armnn;
-using namespace armnn::armnnSerializer;
+using namespace armnnSerializer;
-namespace armnnDeserializeParser
+namespace armnnDeserializer
{
namespace
@@ -39,7 +39,7 @@ namespace
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
- void CheckGraph(const DeserializeParser::GraphPtr& graph,
+ void CheckGraph(const Deserializer::GraphPtr& graph,
unsigned int layersIndex,
const CheckLocation& location)
{
@@ -66,7 +66,7 @@ const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
}
}
-void CheckLayers(const DeserializeParser::GraphPtr& graph,
+void CheckLayers(const Deserializer::GraphPtr& graph,
unsigned int layersIndex,
unsigned int layerIndex,
const CheckLocation& location)
@@ -106,7 +106,7 @@ void CheckLayers(const DeserializeParser::GraphPtr& graph,
}
}
-void CheckTensorPtr(DeserializeParser::TensorRawPtr rawPtr,
+void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
const CheckLocation& location)
{
if (rawPtr == nullptr)
@@ -121,7 +121,7 @@ void CheckTensorPtr(DeserializeParser::TensorRawPtr rawPtr,
}
}
-void CheckConstTensorPtr(DeserializeParser::ConstTensorRawPtr rawPtr,
+void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
const CheckLocation& location)
{
if (rawPtr == nullptr)
@@ -164,22 +164,22 @@ bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& e
return true;
}
-DeserializeParser::DeserializeParser()
+Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
-m_ParserFunctions(Layer_MAX+1, &DeserializeParser::ParseUnsupportedLayer)
+m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
// register supported layers
- m_ParserFunctions[Layer_AdditionLayer] = &DeserializeParser::ParseAdd;
- m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializeParser::ParseConvolution2d;
- m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializeParser::ParseDepthwiseConvolution2d;
- m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializeParser::ParseMultiplication;
- m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializeParser::ParsePooling2d;
- m_ParserFunctions[Layer_ReshapeLayer] = &DeserializeParser::ParseReshape;
- m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializeParser::ParseSoftmax;
+ m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
+ m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
+ m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
+ m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
+ m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
+ m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
+ m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
}
-DeserializeParser::LayerBaseRawPtr DeserializeParser::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
+Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
@@ -211,7 +211,7 @@ DeserializeParser::LayerBaseRawPtr DeserializeParser::GetBaseLayer(const GraphPt
}
}
-int32_t DeserializeParser::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
+int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
@@ -226,19 +226,19 @@ int32_t DeserializeParser::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigne
return 0;
}
-armnn::DataLayout ToDataLayout(armnn::armnnSerializer::DataLayout dataLayout)
+armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
switch (dataLayout)
{
- case armnn::armnnSerializer::DataLayout::DataLayout_NHWC:
+ case armnnSerializer::DataLayout::DataLayout_NHWC:
return armnn::DataLayout::NHWC;
- case armnn::armnnSerializer::DataLayout::DataLayout_NCHW:
+ case armnnSerializer::DataLayout::DataLayout_NCHW:
default:
return armnn::DataLayout::NCHW;
}
}
-armnn::TensorInfo ToTensorInfo(DeserializeParser::TensorRawPtr tensorPtr)
+armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
armnn::DataType type;
CHECK_TENSOR_PTR(tensorPtr);
@@ -287,7 +287,7 @@ armnn::TensorInfo ToTensorInfo(DeserializeParser::TensorRawPtr tensorPtr)
return result;
}
-armnn::ConstTensor ToConstTensor(DeserializeParser::ConstTensorRawPtr constTensorPtr)
+armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
CHECK_CONST_TENSOR_PTR(constTensorPtr);
armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
@@ -314,7 +314,7 @@ armnn::ConstTensor ToConstTensor(DeserializeParser::ConstTensorRawPtr constTenso
}
}
-DeserializeParser::LayerBaseRawPtrVector DeserializeParser::GetGraphInputs(const GraphPtr& graphPtr)
+Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
{
CHECK_GRAPH(graphPtr, 0);
@@ -330,7 +330,7 @@ DeserializeParser::LayerBaseRawPtrVector DeserializeParser::GetGraphInputs(const
return result;
}
-DeserializeParser::LayerBaseRawPtrVector DeserializeParser::GetGraphOutputs(const GraphPtr& graphPtr)
+Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
{
CHECK_GRAPH(graphPtr, 0);
const auto& numOutputs = graphPtr->outputIds()->size();
@@ -345,7 +345,7 @@ DeserializeParser::LayerBaseRawPtrVector DeserializeParser::GetGraphOutputs(cons
return result;
}
-DeserializeParser::TensorRawPtrVector DeserializeParser::GetInputs(const GraphPtr& graphPtr,
+Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
unsigned int layerIndex)
{
CHECK_LAYERS(graphPtr, 0, layerIndex);
@@ -363,7 +363,7 @@ DeserializeParser::TensorRawPtrVector DeserializeParser::GetInputs(const GraphPt
return result;
}
-DeserializeParser::TensorRawPtrVector DeserializeParser::GetOutputs(const GraphPtr& graphPtr,
+Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
unsigned int layerIndex)
{
CHECK_LAYERS(graphPtr, 0, layerIndex);
@@ -379,7 +379,7 @@ DeserializeParser::TensorRawPtrVector DeserializeParser::GetOutputs(const GraphP
return result;
}
-void DeserializeParser::ParseUnsupportedLayer(unsigned int layerIndex)
+void Deserializer::ParseUnsupportedLayer(unsigned int layerIndex)
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
const auto layerName = GetBaseLayer(m_Graph, layerIndex)->layerName()->c_str();
@@ -393,42 +393,42 @@ void DeserializeParser::ParseUnsupportedLayer(unsigned int layerIndex)
CHECK_LOCATION().AsString()));
}
-void DeserializeParser::ResetParser()
+void Deserializer::ResetParser()
{
m_Network = armnn::INetworkPtr(nullptr, nullptr);
m_Graph = nullptr;
}
-IDeserializeParser* IDeserializeParser::CreateRaw()
+IDeserializer* IDeserializer::CreateRaw()
{
- return new DeserializeParser();
+ return new Deserializer();
}
-IDeserializeParserPtr IDeserializeParser::Create()
+IDeserializerPtr IDeserializer::Create()
{
- return IDeserializeParserPtr(CreateRaw(), &IDeserializeParser::Destroy);
+ return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}
-void IDeserializeParser::Destroy(IDeserializeParser* parser)
+void IDeserializer::Destroy(IDeserializer* parser)
{
delete parser;
}
-INetworkPtr DeserializeParser::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
+INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
ResetParser();
m_Graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
return CreateNetworkFromGraph();
}
-armnn::INetworkPtr DeserializeParser::CreateNetworkFromBinary(std::istream& binaryContent)
+armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
ResetParser();
m_Graph = LoadGraphFromBinary(binaryContent);
return CreateNetworkFromGraph();
}
-DeserializeParser::GraphPtr DeserializeParser::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
+Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
if (binaryContent == nullptr)
{
@@ -447,13 +447,13 @@ DeserializeParser::GraphPtr DeserializeParser::LoadGraphFromBinary(const uint8_t
return GetSerializedGraph(binaryContent);
}
-DeserializeParser::GraphPtr DeserializeParser::LoadGraphFromBinary(std::istream& binaryContent)
+Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(std::istream& binaryContent)
{
std::string content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
return GetSerializedGraph(content.data());
}
-INetworkPtr DeserializeParser::CreateNetworkFromGraph()
+INetworkPtr Deserializer::CreateNetworkFromGraph()
{
m_Network = INetwork::Create();
BOOST_ASSERT(m_Graph != nullptr);
@@ -492,7 +492,7 @@ INetworkPtr DeserializeParser::CreateNetworkFromGraph()
return std::move(m_Network);
}
-BindingPointInfo DeserializeParser::GetNetworkInputBindingInfo(unsigned int layerIndex,
+BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
const std::string& name) const
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
@@ -514,7 +514,7 @@ BindingPointInfo DeserializeParser::GetNetworkInputBindingInfo(unsigned int laye
CHECK_LOCATION().AsString()));
}
-BindingPointInfo DeserializeParser::GetNetworkOutputBindingInfo(unsigned int layerIndex,
+BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
const std::string& name) const
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
@@ -538,7 +538,7 @@ BindingPointInfo DeserializeParser::GetNetworkOutputBindingInfo(unsigned int lay
CHECK_LOCATION().AsString()));
}
-void DeserializeParser::SetupInputLayers()
+void Deserializer::SetupInputLayers()
{
CHECK_GRAPH(m_Graph, 0);
auto inputs = GetGraphInputs(m_Graph);
@@ -554,7 +554,7 @@ void DeserializeParser::SetupInputLayers()
}
}
-void DeserializeParser::SetupOutputLayers()
+void Deserializer::SetupOutputLayers()
{
CHECK_GRAPH(m_Graph, 0);
auto outputs = GetGraphOutputs(m_Graph);
@@ -567,7 +567,7 @@ void DeserializeParser::SetupOutputLayers()
}
}
-void DeserializeParser::RegisterOutputSlots(uint32_t layerIndex,
+void Deserializer::RegisterOutputSlots(uint32_t layerIndex,
IConnectableLayer* layer)
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
@@ -591,7 +591,7 @@ void DeserializeParser::RegisterOutputSlots(uint32_t layerIndex,
}
}
-void DeserializeParser::RegisterInputSlots(uint32_t layerIndex,
+void Deserializer::RegisterInputSlots(uint32_t layerIndex,
armnn::IConnectableLayer* layer)
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
@@ -616,7 +616,7 @@ void DeserializeParser::RegisterInputSlots(uint32_t layerIndex,
}
}
-void DeserializeParser::RegisterInputSlotOfConnection(uint32_t connectionIndex,
+void Deserializer::RegisterInputSlotOfConnection(uint32_t connectionIndex,
armnn::IInputSlot* slot)
{
BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);
@@ -625,7 +625,7 @@ void DeserializeParser::RegisterInputSlotOfConnection(uint32_t connectionIndex,
slots.inputSlots.push_back(slot);
}
-void DeserializeParser::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
+void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
armnn::IOutputSlot* slot)
{
BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);
@@ -645,7 +645,7 @@ void DeserializeParser::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
slots.outputSlot = slot;
}
-void DeserializeParser::ParseAdd(unsigned int layerIndex)
+void Deserializer::ParseAdd(unsigned int layerIndex)
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
auto inputs = GetInputs(m_Graph, layerIndex);
@@ -665,7 +665,7 @@ void DeserializeParser::ParseAdd(unsigned int layerIndex)
RegisterOutputSlots(layerIndex, layer);
}
-void DeserializeParser::ParseConvolution2d(unsigned int layerIndex)
+void Deserializer::ParseConvolution2d(unsigned int layerIndex)
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
auto inputs = GetInputs(m_Graph, layerIndex);
@@ -708,7 +708,7 @@ void DeserializeParser::ParseConvolution2d(unsigned int layerIndex)
RegisterOutputSlots(layerIndex, layer);
}
-void DeserializeParser::ParseDepthwiseConvolution2d(unsigned int layerIndex)
+void Deserializer::ParseDepthwiseConvolution2d(unsigned int layerIndex)
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
auto inputs = GetInputs(m_Graph, layerIndex);
@@ -752,7 +752,7 @@ void DeserializeParser::ParseDepthwiseConvolution2d(unsigned int layerIndex)
RegisterOutputSlots(layerIndex, layer);
}
-void DeserializeParser::ParseMultiplication(unsigned int layerIndex)
+void Deserializer::ParseMultiplication(unsigned int layerIndex)
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
auto inputs = GetInputs(m_Graph, layerIndex);
@@ -772,7 +772,7 @@ void DeserializeParser::ParseMultiplication(unsigned int layerIndex)
RegisterOutputSlots(layerIndex, layer);
}
-armnn::Pooling2dDescriptor DeserializeParser::GetPoolingDescriptor(DeserializeParser::PoolingDescriptor pooling2dDesc,
+armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
unsigned int layerIndex)
{
armnn::Pooling2dDescriptor desc;
@@ -863,7 +863,7 @@ armnn::Pooling2dDescriptor DeserializeParser::GetPoolingDescriptor(DeserializePa
return desc;
}
-void DeserializeParser::ParsePooling2d(unsigned int layerIndex)
+void Deserializer::ParsePooling2d(unsigned int layerIndex)
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
@@ -885,7 +885,7 @@ void DeserializeParser::ParsePooling2d(unsigned int layerIndex)
RegisterOutputSlots(layerIndex, layer);
}
-armnn::TensorInfo DeserializeParser::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
+armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
const std::vector<uint32_t>& targetDimsIn)
{
std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
@@ -915,7 +915,7 @@ armnn::TensorInfo DeserializeParser::OutputShapeOfReshape(const armnn::TensorInf
return reshapeInfo;
}
-void DeserializeParser::ParseReshape(unsigned int layerIndex)
+void Deserializer::ParseReshape(unsigned int layerIndex)
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
auto inputs = GetInputs(m_Graph, layerIndex);
@@ -929,7 +929,7 @@ void DeserializeParser::ParseReshape(unsigned int layerIndex)
const auto targetDims = m_Graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
- armnn::TensorInfo reshapeOutputTensorInfo = DeserializeParser::OutputShapeOfReshape(inputTensorInfo, outputDims);
+ armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
@@ -958,14 +958,14 @@ void DeserializeParser::ParseReshape(unsigned int layerIndex)
RegisterOutputSlots(layerIndex, layer);
}
-void DeserializeParser::ParseSoftmax(unsigned int layerIndex)
+void Deserializer::ParseSoftmax(unsigned int layerIndex)
{
CHECK_LAYERS(m_Graph, 0, layerIndex);
- DeserializeParser::TensorRawPtrVector inputs = GetInputs(m_Graph, layerIndex);
+ Deserializer::TensorRawPtrVector inputs = GetInputs(m_Graph, layerIndex);
CHECK_VALID_SIZE(inputs.size(), 1);
- DeserializeParser::TensorRawPtrVector outputs = GetOutputs(m_Graph, layerIndex);
+ Deserializer::TensorRawPtrVector outputs = GetOutputs(m_Graph, layerIndex);
CHECK_VALID_SIZE(outputs.size(), 1);
armnn::SoftmaxDescriptor descriptor;
@@ -981,4 +981,4 @@ void DeserializeParser::ParseSoftmax(unsigned int layerIndex)
RegisterOutputSlots(layerIndex, layer);
}
-} // namespace armnnDeserializeParser
+} // namespace armnnDeserializer
diff --git a/src/armnnDeserializeParser/DeserializeParser.hpp b/src/armnnDeserializer/Deserializer.hpp
index 5f4bf2214e..a66508a158 100644
--- a/src/armnnDeserializeParser/DeserializeParser.hpp
+++ b/src/armnnDeserializer/Deserializer.hpp
@@ -6,22 +6,22 @@
#pragma once
#include "armnn/INetwork.hpp"
-#include "armnnDeserializeParser/IDeserializeParser.hpp"
+#include "armnnDeserializer/IDeserializer.hpp"
#include <Schema_generated.h>
-namespace armnnDeserializeParser
+namespace armnnDeserializer
{
-class DeserializeParser : public IDeserializeParser
+class Deserializer : public IDeserializer
{
public:
// Shorthands for deserializer types
- using ConstTensorRawPtr = const armnn::armnnSerializer::ConstTensor *;
- using GraphPtr = const armnn::armnnSerializer::SerializedGraph *;
- using TensorRawPtr = const armnn::armnnSerializer::TensorInfo *;
- using PoolingDescriptor = const armnn::armnnSerializer::Pooling2dDescriptor *;
+ using ConstTensorRawPtr = const armnnSerializer::ConstTensor *;
+ using GraphPtr = const armnnSerializer::SerializedGraph *;
+ using TensorRawPtr = const armnnSerializer::TensorInfo *;
+ using PoolingDescriptor = const armnnSerializer::Pooling2dDescriptor *;
using TensorRawPtrVector = std::vector<TensorRawPtr>;
- using LayerRawPtr = const armnn::armnnSerializer::LayerBase *;
- using LayerBaseRawPtr = const armnn::armnnSerializer::LayerBase *;
+ using LayerRawPtr = const armnnSerializer::LayerBase *;
+ using LayerBaseRawPtr = const armnnSerializer::LayerBase *;
using LayerBaseRawPtrVector = std::vector<LayerBaseRawPtr>;
public:
@@ -38,8 +38,8 @@ public:
/// Retrieve binding info (layer id and tensor info) for the network output identified by the given layer name
BindingPointInfo GetNetworkOutputBindingInfo(unsigned int layerId, const std::string& name) const override;
- DeserializeParser();
- ~DeserializeParser() {}
+ Deserializer();
+ ~Deserializer() {}
public:
// testable helpers
@@ -58,14 +58,14 @@ public:
private:
// No copying allowed until it is wanted and properly implemented
- DeserializeParser(const DeserializeParser&) = delete;
- DeserializeParser& operator=(const DeserializeParser&) = delete;
+ Deserializer(const Deserializer&) = delete;
+ Deserializer& operator=(const Deserializer&) = delete;
/// Create the network from an already loaded flatbuffers graph
armnn::INetworkPtr CreateNetworkFromGraph();
// signature for the parser functions
- using LayerParsingFunction = void(DeserializeParser::*)(unsigned int layerIndex);
+ using LayerParsingFunction = void(Deserializer::*)(unsigned int layerIndex);
void ParseUnsupportedLayer(unsigned int layerIndex);
void ParseAdd(unsigned int layerIndex);
@@ -107,4 +107,4 @@ private:
std::vector<Connection> m_GraphConnections;
};
-}
+} //namespace armnnDeserializer
diff --git a/src/armnnDeserializeParser/DeserializerSupport.md b/src/armnnDeserializer/DeserializerSupport.md
index 86d3d02415..86d3d02415 100644
--- a/src/armnnDeserializeParser/DeserializerSupport.md
+++ b/src/armnnDeserializer/DeserializerSupport.md
diff --git a/src/armnnDeserializer/README.md b/src/armnnDeserializer/README.md
new file mode 100644
index 0000000000..28d6a37388
--- /dev/null
+++ b/src/armnnDeserializer/README.md
@@ -0,0 +1,7 @@
+# The Arm NN Deserializer
+
+The `armnnDeserializer` is a library for loading neural networks defined by Arm NN FlatBuffers files
+into the Arm NN runtime.
+
+For more information about the layers that are supported, and the networks that have been tested,
+see [DeserializerSupport.md](./DeserializerSupport.md)
\ No newline at end of file
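The new README above describes the library at a high level; a minimal end-to-end sketch of what it is used for, assuming a model file name and an input layer name chosen here purely for illustration, might look like this:

    // Minimal sketch only; "model.armnn" and the input name "input" are illustrative assumptions.
    #include <armnnDeserializer/IDeserializer.hpp>

    #include <cstdint>
    #include <fstream>
    #include <iterator>
    #include <vector>

    int main()
    {
        std::ifstream file("model.armnn", std::ios::binary);
        std::vector<uint8_t> binaryContent((std::istreambuf_iterator<char>(file)),
                                            std::istreambuf_iterator<char>());

        // Parse the FlatBuffers graph into an armnn::INetwork.
        armnnDeserializer::IDeserializerPtr deserializer = armnnDeserializer::IDeserializer::Create();
        armnn::INetworkPtr network = deserializer->CreateNetworkFromBinary(binaryContent);

        // Binding info (layer id + TensorInfo) for a named input of the deserialized graph.
        // In a real application the network would then be optimized and loaded into an IRuntime.
        armnnDeserializer::BindingPointInfo inputBinding =
            deserializer->GetNetworkInputBindingInfo(0, "input");

        return 0;
    }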
diff --git a/src/armnnDeserializeParser/test/DeserializeAdd.cpp b/src/armnnDeserializer/test/DeserializeAdd.cpp
index f0b85905b3..b053b10efa 100644
--- a/src/armnnDeserializeParser/test/DeserializeAdd.cpp
+++ b/src/armnnDeserializer/test/DeserializeAdd.cpp
@@ -5,12 +5,12 @@
#include <boost/test/unit_test.hpp>
#include "ParserFlatbuffersSerializeFixture.hpp"
-#include "../DeserializeParser.hpp"
+#include "../Deserializer.hpp"
#include <string>
#include <iostream>
-BOOST_AUTO_TEST_SUITE(DeserializeParser)
+BOOST_AUTO_TEST_SUITE(Deserializer)
struct AddFixture : public ParserFlatbuffersSerializeFixture
{
diff --git a/src/armnnDeserializeParser/test/DeserializeConvolution2d.cpp b/src/armnnDeserializer/test/DeserializeConvolution2d.cpp
index f3f6feb7a1..86f7cac3bb 100644
--- a/src/armnnDeserializeParser/test/DeserializeConvolution2d.cpp
+++ b/src/armnnDeserializer/test/DeserializeConvolution2d.cpp
@@ -5,12 +5,12 @@
#include <boost/test/unit_test.hpp>
#include "ParserFlatbuffersSerializeFixture.hpp"
-#include "../DeserializeParser.hpp"
+#include "../Deserializer.hpp"
#include <string>
#include <iostream>
-BOOST_AUTO_TEST_SUITE(DeserializeParser)
+BOOST_AUTO_TEST_SUITE(Deserializer)
struct Convolution2dFixture : public ParserFlatbuffersSerializeFixture
{
diff --git a/src/armnnDeserializeParser/test/DeserializeMultiplication.cpp b/src/armnnDeserializer/test/DeserializeMultiplication.cpp
index f69413b223..a9dbfbf7da 100644
--- a/src/armnnDeserializeParser/test/DeserializeMultiplication.cpp
+++ b/src/armnnDeserializer/test/DeserializeMultiplication.cpp
@@ -5,12 +5,12 @@
#include <boost/test/unit_test.hpp>
#include "ParserFlatbuffersSerializeFixture.hpp"
-#include "../DeserializeParser.hpp"
+#include "../Deserializer.hpp"
#include <string>
#include <iostream>
-BOOST_AUTO_TEST_SUITE(DeserializeParser)
+BOOST_AUTO_TEST_SUITE(Deserializer)
struct MultiplicationFixture : public ParserFlatbuffersSerializeFixture
{
diff --git a/src/armnnDeserializeParser/test/DeserializePooling2d.cpp b/src/armnnDeserializer/test/DeserializePooling2d.cpp
index 70b96ba27b..ef30a84342 100644
--- a/src/armnnDeserializeParser/test/DeserializePooling2d.cpp
+++ b/src/armnnDeserializer/test/DeserializePooling2d.cpp
@@ -5,12 +5,12 @@
#include <boost/test/unit_test.hpp>
#include "ParserFlatbuffersSerializeFixture.hpp"
-#include "../DeserializeParser.hpp"
+#include "../Deserializer.hpp"
#include <string>
#include <iostream>
-BOOST_AUTO_TEST_SUITE(DeserializeParser)
+BOOST_AUTO_TEST_SUITE(Deserializer)
struct Pooling2dFixture : public ParserFlatbuffersSerializeFixture
{
diff --git a/src/armnnDeserializeParser/test/DeserializeReshape.cpp b/src/armnnDeserializer/test/DeserializeReshape.cpp
index 21e60933f6..301d8986c0 100644
--- a/src/armnnDeserializeParser/test/DeserializeReshape.cpp
+++ b/src/armnnDeserializer/test/DeserializeReshape.cpp
@@ -5,12 +5,12 @@
#include <boost/test/unit_test.hpp>
#include "ParserFlatbuffersSerializeFixture.hpp"
-#include "../DeserializeParser.hpp"
+#include "../Deserializer.hpp"
#include <string>
#include <iostream>
-BOOST_AUTO_TEST_SUITE(DeserializeParser)
+BOOST_AUTO_TEST_SUITE(Deserializer)
struct ReshapeFixture : public ParserFlatbuffersSerializeFixture
{
diff --git a/src/armnnDeserializeParser/test/ParserFlatbuffersSerializeFixture.hpp b/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp
index 5d8c377981..42ab2b17d6 100644
--- a/src/armnnDeserializeParser/test/ParserFlatbuffersSerializeFixture.hpp
+++ b/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp
@@ -8,7 +8,7 @@
#include "SchemaSerialize.hpp"
#include <armnn/IRuntime.hpp>
-#include <armnnDeserializeParser/IDeserializeParser.hpp>
+#include <armnnDeserializer/IDeserializer.hpp>
#include <boost/assert.hpp>
#include <boost/format.hpp>
@@ -21,13 +21,13 @@
#include <Schema_generated.h>
-using armnnDeserializeParser::IDeserializeParser;
-using TensorRawPtr = armnn::armnnSerializer::TensorInfo*;
+using armnnDeserializer::IDeserializer;
+using TensorRawPtr = armnnSerializer::TensorInfo*;
struct ParserFlatbuffersSerializeFixture
{
ParserFlatbuffersSerializeFixture() :
- m_Parser(IDeserializeParser::Create()),
+ m_Parser(IDeserializer::Create()),
m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions())),
m_NetworkIdentifier(-1)
{
@@ -35,7 +35,7 @@ struct ParserFlatbuffersSerializeFixture
std::vector<uint8_t> m_GraphBinary;
std::string m_JsonString;
- std::unique_ptr<IDeserializeParser, void (*)(IDeserializeParser* parser)> m_Parser;
+ std::unique_ptr<IDeserializer, void (*)(IDeserializer* parser)> m_Parser;
armnn::IRuntimePtr m_Runtime;
armnn::NetworkId m_NetworkIdentifier;
@@ -133,7 +133,7 @@ struct ParserFlatbuffersSerializeFixture
const std::map<std::string, std::vector<DataType>>& expectedOutputData);
void CheckTensors(const TensorRawPtr& tensors, size_t shapeSize, const std::vector<int32_t>& shape,
- armnn::armnnSerializer::TensorInfo tensorType, const std::string& name,
+ armnnSerializer::TensorInfo tensorType, const std::string& name,
const float scale, const int64_t zeroPoint)
{
BOOST_CHECK_EQUAL(shapeSize, tensors->dimensions()->size());
diff --git a/src/armnnDeserializeParser/test/SchemaSerialize.hpp b/src/armnnDeserializer/test/SchemaSerialize.hpp
index ec7e6bab6a..ec7e6bab6a 100644
--- a/src/armnnDeserializeParser/test/SchemaSerialize.hpp
+++ b/src/armnnDeserializer/test/SchemaSerialize.hpp
diff --git a/src/armnnDeserializeParser/test/SchemaSerialize.s b/src/armnnDeserializer/test/SchemaSerialize.s
index dbbb7db3e5..dbbb7db3e5 100644
--- a/src/armnnDeserializeParser/test/SchemaSerialize.s
+++ b/src/armnnDeserializer/test/SchemaSerialize.s
diff --git a/src/armnnSerializer/Schema.fbs b/src/armnnSerializer/Schema.fbs
index 6c542b1b2d..1b7427b185 100644
--- a/src/armnnSerializer/Schema.fbs
+++ b/src/armnnSerializer/Schema.fbs
@@ -3,7 +3,7 @@
// SPDX-License-Identifier: MIT
//
-namespace armnn.armnnSerializer;
+namespace armnnSerializer;
file_identifier "ARMN";
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 27204a010d..b85c45aa10 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -17,7 +17,7 @@
using namespace armnn;
namespace fb = flatbuffers;
-namespace serializer = armnn::armnnSerializer;
+namespace serializer = armnnSerializer;
namespace armnnSerializer
{
@@ -40,7 +40,7 @@ uint32_t SerializerVisitor::GetSerializedId(unsigned int guid)
}
// Build FlatBuffer for Input Layer
-void SerializerVisitor::VisitInputLayer(const IConnectableLayer* layer, LayerBindingId id, const char* name)
+void SerializerVisitor::VisitInputLayer(const armnn::IConnectableLayer* layer, LayerBindingId id, const char* name)
{
// Create FlatBuffer BaseLayer
auto flatBufferInputBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Input);
@@ -60,7 +60,7 @@ void SerializerVisitor::VisitInputLayer(const IConnectableLayer* layer, LayerBin
}
// Build FlatBuffer for Output Layer
-void SerializerVisitor::VisitOutputLayer(const IConnectableLayer* layer, LayerBindingId id, const char* name)
+void SerializerVisitor::VisitOutputLayer(const armnn::IConnectableLayer* layer, LayerBindingId id, const char* name)
{
// Create FlatBuffer BaseLayer
auto flatBufferOutputBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Output);
@@ -79,7 +79,7 @@ void SerializerVisitor::VisitOutputLayer(const IConnectableLayer* layer, LayerBi
}
// Build FlatBuffer for Addition Layer
-void SerializerVisitor::VisitAdditionLayer(const IConnectableLayer* layer, const char* name)
+void SerializerVisitor::VisitAdditionLayer(const armnn::IConnectableLayer* layer, const char* name)
{
// Create FlatBuffer BaseLayer
auto flatBufferAdditionBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Addition);
@@ -92,10 +92,10 @@ void SerializerVisitor::VisitAdditionLayer(const IConnectableLayer* layer, const
}
// Build FlatBuffer for Convolution2dLayer
-void SerializerVisitor::VisitConvolution2dLayer(const IConnectableLayer* layer,
- const Convolution2dDescriptor& descriptor,
- const ConstTensor& weights,
- const Optional<ConstTensor>& biases,
+void SerializerVisitor::VisitConvolution2dLayer(const armnn::IConnectableLayer* layer,
+ const armnn::Convolution2dDescriptor& descriptor,
+ const armnn::ConstTensor& weights,
+ const armnn::Optional<armnn::ConstTensor>& biases,
const char* name)
{
// Create FlatBuffer BaseLayer
@@ -129,10 +129,10 @@ void SerializerVisitor::VisitConvolution2dLayer(const IConnectableLayer* layer,
CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_Convolution2dLayer);
}
-void SerializerVisitor::VisitDepthwiseConvolution2dLayer(const IConnectableLayer* layer,
- const DepthwiseConvolution2dDescriptor& descriptor,
- const ConstTensor& weights,
- const Optional<ConstTensor>& biases,
+void SerializerVisitor::VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer,
+ const armnn::DepthwiseConvolution2dDescriptor& descriptor,
+ const armnn::ConstTensor& weights,
+ const armnn::Optional<armnn::ConstTensor>& biases,
const char* name)
{
auto fbBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_DepthwiseConvolution2d);
@@ -163,7 +163,7 @@ void SerializerVisitor::VisitDepthwiseConvolution2dLayer(const IConnectableLayer
}
// Build FlatBuffer for Multiplication Layer
-void SerializerVisitor::VisitMultiplicationLayer(const IConnectableLayer* layer, const char* name)
+void SerializerVisitor::VisitMultiplicationLayer(const armnn::IConnectableLayer* layer, const char* name)
{
// Create FlatBuffer BaseLayer
auto flatBufferMultiplicationBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Multiplication);
@@ -177,7 +177,7 @@ void SerializerVisitor::VisitMultiplicationLayer(const IConnectableLayer* layer,
}
// Build FlatBuffer for Reshape Layer
-void SerializerVisitor::VisitReshapeLayer(const IConnectableLayer* layer,
+void SerializerVisitor::VisitReshapeLayer(const armnn::IConnectableLayer* layer,
const armnn::ReshapeDescriptor& reshapeDescriptor,
const char* name)
{
@@ -202,8 +202,8 @@ void SerializerVisitor::VisitReshapeLayer(const IConnectableLayer* layer,
}
// Build FlatBuffer for Softmax Layer
-void SerializerVisitor::VisitSoftmaxLayer(const IConnectableLayer* layer,
- const SoftmaxDescriptor& softmaxDescriptor,
+void SerializerVisitor::VisitSoftmaxLayer(const armnn::IConnectableLayer* layer,
+ const armnn::SoftmaxDescriptor& softmaxDescriptor,
const char* name)
{
// Create FlatBuffer BaseLayer
@@ -222,8 +222,8 @@ void SerializerVisitor::VisitSoftmaxLayer(const IConnectableLayer* layer,
CreateAnyLayer(flatBufferSoftmaxLayer.o, serializer::Layer::Layer_SoftmaxLayer);
}
-void SerializerVisitor::VisitPooling2dLayer(const IConnectableLayer* layer,
- const Pooling2dDescriptor& pooling2dDescriptor,
+void SerializerVisitor::VisitPooling2dLayer(const armnn::IConnectableLayer* layer,
+ const armnn::Pooling2dDescriptor& pooling2dDescriptor,
const char* name)
{
auto fbPooling2dBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Pooling2d);
@@ -249,7 +249,7 @@ void SerializerVisitor::VisitPooling2dLayer(const IConnectableLayer* layer,
CreateAnyLayer(fbPooling2dLayer.o, serializer::Layer::Layer_Pooling2dLayer);
}
-fb::Offset<serializer::LayerBase> SerializerVisitor::CreateLayerBase(const IConnectableLayer* layer,
+fb::Offset<serializer::LayerBase> SerializerVisitor::CreateLayerBase(const armnn::IConnectableLayer* layer,
const serializer::LayerType layerType)
{
std::vector<fb::Offset<serializer::InputSlot>> inputSlots = CreateInputSlots(layer);
@@ -265,9 +265,7 @@ fb::Offset<serializer::LayerBase> SerializerVisitor::CreateLayerBase(const IConn
void SerializerVisitor::CreateAnyLayer(const flatbuffers::Offset<void>& layer, const serializer::Layer serializerLayer)
{
- auto anyLayer = armnn::armnnSerializer::CreateAnyLayer(m_flatBufferBuilder,
- serializerLayer,
- layer);
+ auto anyLayer = armnnSerializer::CreateAnyLayer(m_flatBufferBuilder, serializerLayer, layer);
m_serializedLayers.push_back(anyLayer);
}
@@ -280,9 +278,10 @@ flatbuffers::Offset<flatbuffers::Vector<T>> SerializerVisitor::CreateDataVector(
return fbVector;
}
-flatbuffers::Offset<serializer::ConstTensor> SerializerVisitor::CreateConstTensorInfo(const ConstTensor& constTensor)
+flatbuffers::Offset<serializer::ConstTensor>
+ SerializerVisitor::CreateConstTensorInfo(const armnn::ConstTensor& constTensor)
{
- TensorInfo tensorInfo = constTensor.GetInfo();
+ armnn::TensorInfo tensorInfo = constTensor.GetInfo();
// Get the dimensions
std::vector<unsigned int> shape;
@@ -302,8 +301,8 @@ flatbuffers::Offset<serializer::ConstTensor> SerializerVisitor::CreateConstTenso
switch (tensorInfo.GetDataType())
{
- case DataType::Float32:
- case DataType::Signed32:
+ case armnn::DataType::Float32:
+ case armnn::DataType::Signed32:
{
auto fbVector = CreateDataVector<int32_t>(constTensor.GetMemoryArea(), constTensor.GetNumBytes());
flatbuffers::Offset<serializer::IntData> flatBuffersData = serializer::CreateIntData(
@@ -312,7 +311,7 @@ flatbuffers::Offset<serializer::ConstTensor> SerializerVisitor::CreateConstTenso
fbPayload = flatBuffersData.o;
break;
}
- case DataType::Float16:
+ case armnn::DataType::Float16:
{
auto fbVector = CreateDataVector<int16_t>(constTensor.GetMemoryArea(), constTensor.GetNumBytes());
flatbuffers::Offset<serializer::ShortData> flatBuffersData = serializer::CreateShortData(
@@ -321,8 +320,8 @@ flatbuffers::Offset<serializer::ConstTensor> SerializerVisitor::CreateConstTenso
fbPayload = flatBuffersData.o;
break;
}
- case DataType::QuantisedAsymm8:
- case DataType::Boolean:
+ case armnn::DataType::QuantisedAsymm8:
+ case armnn::DataType::Boolean:
default:
{
auto fbVector = CreateDataVector<int8_t>(constTensor.GetMemoryArea(), constTensor.GetNumBytes());
@@ -340,7 +339,8 @@ flatbuffers::Offset<serializer::ConstTensor> SerializerVisitor::CreateConstTenso
return flatBufferConstTensor;
}
-std::vector<fb::Offset<serializer::InputSlot>> SerializerVisitor::CreateInputSlots(const IConnectableLayer* layer)
+std::vector<fb::Offset<serializer::InputSlot>>
+ SerializerVisitor::CreateInputSlots(const armnn::IConnectableLayer* layer)
{
std::vector<fb::Offset<serializer::InputSlot>> inputSlots;
@@ -361,7 +361,8 @@ std::vector<fb::Offset<serializer::InputSlot>> SerializerVisitor::CreateInputSlo
return inputSlots;
}
-std::vector<fb::Offset<serializer::OutputSlot>> SerializerVisitor::CreateOutputSlots(const IConnectableLayer* layer)
+std::vector<fb::Offset<serializer::OutputSlot>>
+ SerializerVisitor::CreateOutputSlots(const armnn::IConnectableLayer* layer)
{
std::vector<fb::Offset<serializer::OutputSlot>> outputSlots;
@@ -369,7 +370,7 @@ std::vector<fb::Offset<serializer::OutputSlot>> SerializerVisitor::CreateOutputS
for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
{
const IOutputSlot& outputSlot = layer->GetOutputSlot(slotIndex);
- const TensorInfo& tensorInfo = outputSlot.GetTensorInfo();
+ const armnn::TensorInfo& tensorInfo = outputSlot.GetTensorInfo();
// Get the dimensions
std::vector<unsigned int> shape;
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index 907d4eda69..aa765a2065 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -37,7 +37,7 @@ public:
return m_outputIds;
}
- std::vector<flatbuffers::Offset<armnn::armnnSerializer::AnyLayer>>& GetSerializedLayers()
+ std::vector<flatbuffers::Offset<armnnSerializer::AnyLayer>>& GetSerializedLayers()
{
return m_serializedLayers;
}
@@ -83,15 +83,15 @@ public:
private:
/// Creates the Input Slots and Output Slots and LayerBase for the layer.
- flatbuffers::Offset<armnn::armnnSerializer::LayerBase> CreateLayerBase(
+ flatbuffers::Offset<armnnSerializer::LayerBase> CreateLayerBase(
const armnn::IConnectableLayer* layer,
- const armnn::armnnSerializer::LayerType layerType);
+ const armnnSerializer::LayerType layerType);
/// Creates the serializer AnyLayer for the layer and adds it to m_serializedLayers.
- void CreateAnyLayer(const flatbuffers::Offset<void>& layer, const armnn::armnnSerializer::Layer serializerLayer);
+ void CreateAnyLayer(const flatbuffers::Offset<void>& layer, const armnnSerializer::Layer serializerLayer);
/// Creates the serializer ConstTensor for the armnn ConstTensor.
- flatbuffers::Offset<armnn::armnnSerializer::ConstTensor> CreateConstTensorInfo(
+ flatbuffers::Offset<armnnSerializer::ConstTensor> CreateConstTensorInfo(
const armnn::ConstTensor& constTensor);
template <typename T>
@@ -101,18 +101,18 @@ private:
uint32_t GetSerializedId(unsigned int guid);
/// Creates the serializer InputSlots for the layer.
- std::vector<flatbuffers::Offset<armnn::armnnSerializer::InputSlot>> CreateInputSlots(
+ std::vector<flatbuffers::Offset<armnnSerializer::InputSlot>> CreateInputSlots(
const armnn::IConnectableLayer* layer);
/// Creates the serializer OutputSlots for the layer.
- std::vector<flatbuffers::Offset<armnn::armnnSerializer::OutputSlot>> CreateOutputSlots(
+ std::vector<flatbuffers::Offset<armnnSerializer::OutputSlot>> CreateOutputSlots(
const armnn::IConnectableLayer* layer);
/// FlatBufferBuilder to create our layers' FlatBuffers.
flatbuffers::FlatBufferBuilder m_flatBufferBuilder;
/// AnyLayers required by the SerializedGraph.
- std::vector<flatbuffers::Offset<armnn::armnnSerializer::AnyLayer>> m_serializedLayers;
+ std::vector<flatbuffers::Offset<armnnSerializer::AnyLayer>> m_serializedLayers;
/// Guids of all Input Layers required by the SerializedGraph.
std::vector<unsigned int> m_inputIds;
diff --git a/src/armnnSerializer/SerializerUtils.cpp b/src/armnnSerializer/SerializerUtils.cpp
index 2bad85e1a0..592f258b81 100644
--- a/src/armnnSerializer/SerializerUtils.cpp
+++ b/src/armnnSerializer/SerializerUtils.cpp
@@ -9,91 +9,90 @@ namespace armnnSerializer
{
using namespace armnn;
-namespace serializer = armnn::armnnSerializer;
-serializer::ConstTensorData GetFlatBufferConstTensorData(DataType dataType)
+armnnSerializer::ConstTensorData GetFlatBufferConstTensorData(armnn::DataType dataType)
{
switch (dataType)
{
- case DataType::Float32:
- case DataType::Signed32:
- return serializer::ConstTensorData::ConstTensorData_IntData;
- case DataType::Float16:
- return serializer::ConstTensorData::ConstTensorData_ShortData;
- case DataType::QuantisedAsymm8:
- case DataType::Boolean:
- return serializer::ConstTensorData::ConstTensorData_ByteData;
+ case armnn::DataType::Float32:
+ case armnn::DataType::Signed32:
+ return armnnSerializer::ConstTensorData::ConstTensorData_IntData;
+ case armnn::DataType::Float16:
+ return armnnSerializer::ConstTensorData::ConstTensorData_ShortData;
+ case armnn::DataType::QuantisedAsymm8:
+ case armnn::DataType::Boolean:
+ return armnnSerializer::ConstTensorData::ConstTensorData_ByteData;
default:
- return serializer::ConstTensorData::ConstTensorData_NONE;
+ return armnnSerializer::ConstTensorData::ConstTensorData_NONE;
}
}
-serializer::DataType GetFlatBufferDataType(DataType dataType)
+armnnSerializer::DataType GetFlatBufferDataType(armnn::DataType dataType)
{
switch (dataType)
{
- case DataType::Float32:
- return serializer::DataType::DataType_Float32;
- case DataType::Float16:
- return serializer::DataType::DataType_Float16;
- case DataType::Signed32:
- return serializer::DataType::DataType_Signed32;
- case DataType::QuantisedAsymm8:
- return serializer::DataType::DataType_QuantisedAsymm8;
- case DataType::Boolean:
- return serializer::DataType::DataType_Boolean;
+ case armnn::DataType::Float32:
+ return armnnSerializer::DataType::DataType_Float32;
+ case armnn::DataType::Float16:
+ return armnnSerializer::DataType::DataType_Float16;
+ case armnn::DataType::Signed32:
+ return armnnSerializer::DataType::DataType_Signed32;
+ case armnn::DataType::QuantisedAsymm8:
+ return armnnSerializer::DataType::DataType_QuantisedAsymm8;
+ case armnn::DataType::Boolean:
+ return armnnSerializer::DataType::DataType_Boolean;
default:
- return serializer::DataType::DataType_Float16;
+ return armnnSerializer::DataType::DataType_Float16;
}
}
-serializer::DataLayout GetFlatBufferDataLayout(DataLayout dataLayout)
+armnnSerializer::DataLayout GetFlatBufferDataLayout(armnn::DataLayout dataLayout)
{
switch (dataLayout)
{
- case DataLayout::NHWC:
- return serializer::DataLayout::DataLayout_NHWC;
- case DataLayout::NCHW:
+ case armnn::DataLayout::NHWC:
+ return armnnSerializer::DataLayout::DataLayout_NHWC;
+ case armnn::DataLayout::NCHW:
default:
- return serializer::DataLayout::DataLayout_NCHW;
+ return armnnSerializer::DataLayout::DataLayout_NCHW;
}
}
-serializer::PoolingAlgorithm GetFlatBufferPoolingAlgorithm(PoolingAlgorithm poolingAlgorithm)
+armnnSerializer::PoolingAlgorithm GetFlatBufferPoolingAlgorithm(armnn::PoolingAlgorithm poolingAlgorithm)
{
switch (poolingAlgorithm)
{
- case PoolingAlgorithm::Average:
- return serializer::PoolingAlgorithm::PoolingAlgorithm_Average;
- case PoolingAlgorithm::L2:
- return serializer::PoolingAlgorithm::PoolingAlgorithm_L2;
- case PoolingAlgorithm::Max:
+ case armnn::PoolingAlgorithm::Average:
+ return armnnSerializer::PoolingAlgorithm::PoolingAlgorithm_Average;
+ case armnn::PoolingAlgorithm::L2:
+ return armnnSerializer::PoolingAlgorithm::PoolingAlgorithm_L2;
+ case armnn::PoolingAlgorithm::Max:
default:
- return serializer::PoolingAlgorithm::PoolingAlgorithm_Max;
+ return armnnSerializer::PoolingAlgorithm::PoolingAlgorithm_Max;
}
}
-serializer::OutputShapeRounding GetFlatBufferOutputShapeRounding(OutputShapeRounding outputShapeRounding)
+armnnSerializer::OutputShapeRounding GetFlatBufferOutputShapeRounding(armnn::OutputShapeRounding outputShapeRounding)
{
switch (outputShapeRounding)
{
- case OutputShapeRounding::Ceiling:
- return serializer::OutputShapeRounding::OutputShapeRounding_Ceiling;
- case OutputShapeRounding::Floor:
+ case armnn::OutputShapeRounding::Ceiling:
+ return armnnSerializer::OutputShapeRounding::OutputShapeRounding_Ceiling;
+ case armnn::OutputShapeRounding::Floor:
default:
- return serializer::OutputShapeRounding::OutputShapeRounding_Floor;
+ return armnnSerializer::OutputShapeRounding::OutputShapeRounding_Floor;
}
}
-serializer::PaddingMethod GetFlatBufferPaddingMethod(PaddingMethod paddingMethod)
+armnnSerializer::PaddingMethod GetFlatBufferPaddingMethod(armnn::PaddingMethod paddingMethod)
{
switch (paddingMethod)
{
- case PaddingMethod::IgnoreValue:
- return serializer::PaddingMethod::PaddingMethod_IgnoreValue;
- case PaddingMethod::Exclude:
+ case armnn::PaddingMethod::IgnoreValue:
+ return armnnSerializer::PaddingMethod::PaddingMethod_IgnoreValue;
+ case armnn::PaddingMethod::Exclude:
default:
- return serializer::PaddingMethod::PaddingMethod_Exclude;
+ return armnnSerializer::PaddingMethod::PaddingMethod_Exclude;
}
}
diff --git a/src/armnnSerializer/SerializerUtils.hpp b/src/armnnSerializer/SerializerUtils.hpp
index 06f3076fd6..71eb01bbeb 100644
--- a/src/armnnSerializer/SerializerUtils.hpp
+++ b/src/armnnSerializer/SerializerUtils.hpp
@@ -11,17 +11,17 @@
namespace armnnSerializer
{
-armnn::armnnSerializer::ConstTensorData GetFlatBufferConstTensorData(armnn::DataType dataType);
+armnnSerializer::ConstTensorData GetFlatBufferConstTensorData(armnn::DataType dataType);
-armnn::armnnSerializer::DataType GetFlatBufferDataType(armnn::DataType dataType);
+armnnSerializer::DataType GetFlatBufferDataType(armnn::DataType dataType);
-armnn::armnnSerializer::DataLayout GetFlatBufferDataLayout(armnn::DataLayout dataLayout);
+armnnSerializer::DataLayout GetFlatBufferDataLayout(armnn::DataLayout dataLayout);
-armnn::armnnSerializer::PoolingAlgorithm GetFlatBufferPoolingAlgorithm(armnn::PoolingAlgorithm poolingAlgorithm);
+armnnSerializer::PoolingAlgorithm GetFlatBufferPoolingAlgorithm(armnn::PoolingAlgorithm poolingAlgorithm);
-armnn::armnnSerializer::OutputShapeRounding GetFlatBufferOutputShapeRounding(
+armnnSerializer::OutputShapeRounding GetFlatBufferOutputShapeRounding(
armnn::OutputShapeRounding outputShapeRounding);
-armnn::armnnSerializer::PaddingMethod GetFlatBufferPaddingMethod(armnn::PaddingMethod paddingMethod);
+armnnSerializer::PaddingMethod GetFlatBufferPaddingMethod(armnn::PaddingMethod paddingMethod);
} // namespace armnnSerializer
\ No newline at end of file
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index a88193d842..7dad6accd0 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -8,7 +8,7 @@
#include "../Serializer.hpp"
-#include <armnnDeserializeParser/IDeserializeParser.hpp>
+#include <armnnDeserializer/IDeserializer.hpp>
#include <random>
#include <sstream>
@@ -17,7 +17,7 @@
#include <boost/test/unit_test.hpp>
#include <flatbuffers/idl.h>
-using armnnDeserializeParser::IDeserializeParser;
+using armnnDeserializer::IDeserializer;
namespace
{
@@ -25,7 +25,7 @@ namespace
armnn::INetworkPtr DeserializeNetwork(const std::string& serializerString)
{
std::vector<std::uint8_t> const serializerVector{serializerString.begin(), serializerString.end()};
- return armnnDeserializeParser::IDeserializeParser::Create()->CreateNetworkFromBinary(serializerVector);
+ return IDeserializer::Create()->CreateNetworkFromBinary(serializerVector);
}
std::string SerializeNetwork(const armnn::INetwork& network)
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index a97d6da3d5..c040c9b1f8 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -6,7 +6,7 @@
#include <armnn/TypesUtils.hpp>
#if defined(ARMNN_SERIALIZER)
-#include "armnnDeserializeParser/IDeserializeParser.hpp"
+#include "armnnDeserializer/IDeserializer.hpp"
#endif
#if defined(ARMNN_CAFFE_PARSER)
#include "armnnCaffeParser/ICaffeParser.hpp"
@@ -367,7 +367,7 @@ int RunTest(const std::string& format,
if (modelFormat.find("armnn") != std::string::npos)
{
#if defined(ARMNN_SERIALIZER)
- return MainImpl<armnnDeserializeParser::IDeserializeParser, float>(
+ return MainImpl<armnnDeserializer::IDeserializer, float>(
modelPath.c_str(), isModelBinary, computeDevice,
inputNamesVector, inputTensorShapes,
inputTensorDataFilePathsVector, inputTypesVector,
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index 4819523595..3b3dd95e88 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -6,7 +6,7 @@
#include <armnn/ArmNN.hpp>
#if defined(ARMNN_SERIALIZER)
-#include "armnnDeserializeParser/IDeserializeParser.hpp"
+#include "armnnDeserializer/IDeserializer.hpp"
#endif
#if defined(ARMNN_TF_LITE_PARSER)
#include <armnnTfLiteParser/ITfLiteParser.hpp>
@@ -165,10 +165,10 @@ public:
#if defined(ARMNN_SERIALIZER)
template <>
-struct CreateNetworkImpl<armnnDeserializeParser::IDeserializeParser>
+struct CreateNetworkImpl<armnnDeserializer::IDeserializer>
{
public:
- using IParser = armnnDeserializeParser::IDeserializeParser;
+ using IParser = armnnDeserializer::IDeserializer;
using Params = InferenceModelInternal::Params;
using BindingPointInfo = InferenceModelInternal::BindingPointInfo;