diff options
author | Sadik Armagan <sadik.armagan@arm.com> | 2021-02-03 09:29:30 +0000 |
---|---|---|
committer | Sadik Armagan <sadik.armagan@arm.com> | 2021-02-03 09:29:47 +0000 |
commit | 0c3ea5b8ac5ad8ca516930a0491afb1d1074e45b (patch) | |
tree | 47ff1e9c1c70a3b134c1e9063dada66d70a7c963 /src/armnnSerializer | |
parent | 84f41eb74765bd93307f3c6b334354c486dc746d (diff) | |
download | armnn-0c3ea5b8ac5ad8ca516930a0491afb1d1074e45b.tar.gz |
backends/reference: Add ReduceSum operation support
This patch adds ReduceSum operation support for the reference backend,
which computes the sum of elements across dimensions of a tensor.
Changelog v1:
- Fix file header descriptions.
Changelog v2:
- Fix line limit issue.
- Fix type conversion issue.
Changelog v3:
- Remove tabs.
- Modify newly added file headers.
Changelog v4:
- Symbol on header isn't allowed so drop it from newly added file headers.
Changelog v5:
- Remove tabs, fix the use of brackets and align lines correctly.
Changelog v6:
- Add serializer and deserializer support.
Changelog v7:
- Fix build error add missed code.
Changelog v8:
- Rename ReduceSumDescriptor to ReduceDescriptor
- Update m_KeepDims field data type to bool on ReduceDescriptor
- Add ReduceOperation field to ReduceDescriptor
- Rename ReduceSumLayer to ReduceLayer
- Update ReduceLayer to use ReduceDescriptor
- Update ReduceLayer::ValidateTensorShapesFromInputs() function
- Rename RefReduceSumWorkload to RefReduceWorkload
- Update workload to use ReduceDescriptor
- Update workload to use Decoders and Encoders
- Remove ReduceSum.hpp and ReduceSum.cpp
- Added Reduce.hpp and Reduce.cpp
- Move Mean.cpp (which is implementing REDUCE_MEAN) functionality to Reduce.cpp
- Update RefMeanWorkload to call Reduce function with ReduceOperation::Mean argument
- Remove Mean.hpp and Mean.cpp
- Update the Serializer/Deserializer ArmnnSchema.fbs for ReduceLayer, ReduceDescriptor, and ReduceOperation
- Update Serializer and Deserializer for serializing/parsing ReduceLayer
- Added TfLite parser Sum test for REDUCE_SUM operator
- Make corresponding changes on front-end and Ref backend to support REDUCE_SUM operator
Changelog v9:
- Fixed build errors.
Change-Id: I8c8e034f3df73f9565b3c18eff51ecca6c542195
Signed-off-by: Inki Dae <inki.dae@samsung.com>
Signed-off-by: Sadik Armagan <sadik.armagan@arm.com>
Diffstat (limited to 'src/armnnSerializer')
-rw-r--r-- | src/armnnSerializer/ArmnnSchema.fbs | 28 | ||||
-rw-r--r-- | src/armnnSerializer/ArmnnSchema_generated.h | 238 | ||||
-rw-r--r-- | src/armnnSerializer/Serializer.cpp | 19 | ||||
-rw-r--r-- | src/armnnSerializer/Serializer.hpp | 4 | ||||
-rw-r--r-- | src/armnnSerializer/SerializerUtils.cpp | 17 | ||||
-rw-r--r-- | src/armnnSerializer/SerializerUtils.hpp | 2 | ||||
-rw-r--r-- | src/armnnSerializer/test/SerializerTests.cpp | 30 |
7 files changed, 325 insertions, 13 deletions
diff --git a/src/armnnSerializer/ArmnnSchema.fbs b/src/armnnSerializer/ArmnnSchema.fbs index 1f71ce19f2..aa539b188f 100644 --- a/src/armnnSerializer/ArmnnSchema.fbs +++ b/src/armnnSerializer/ArmnnSchema.fbs @@ -47,6 +47,13 @@ enum DataLayout : byte { NCHW = 1 } +enum ReduceOperation: byte { + Sum = 0, + Max = 1, + Mean = 2, + Min = 3 +} + enum ResizeMethod: byte { NearestNeighbor = 0, Bilinear = 1, @@ -160,7 +167,8 @@ enum LayerType : uint { QLstm = 56, Fill = 57, Rank = 58, - LogicalBinary = 59 + LogicalBinary = 59, + Reduce = 60 } // Base layer table to be used as part of other layers @@ -881,6 +889,19 @@ table RankLayer { base:LayerBase; } +table ReduceLayer { + base:LayerBase; + descriptor:ReduceDescriptor; +} + +table ReduceDescriptor { + targetHeight:uint; + targetWidth:uint; + keepDims:bool = false; + axis:[uint]; + reduceOperation:ReduceOperation = Sum; +} + union Layer { ActivationLayer, AdditionLayer, @@ -941,7 +962,8 @@ union Layer { QLstmLayer, FillLayer, RankLayer, - LogicalBinaryLayer + LogicalBinaryLayer, + ReduceLayer } table AnyLayer { @@ -960,4 +982,4 @@ table SerializedGraph { featureVersions:FeatureCompatibilityVersions; } -root_type SerializedGraph;
\ No newline at end of file +root_type SerializedGraph; diff --git a/src/armnnSerializer/ArmnnSchema_generated.h b/src/armnnSerializer/ArmnnSchema_generated.h index 031da5d6fa..32548b2144 100644 --- a/src/armnnSerializer/ArmnnSchema_generated.h +++ b/src/armnnSerializer/ArmnnSchema_generated.h @@ -4,6 +4,7 @@ // // automatically generated by the FlatBuffers compiler, do not modify + #ifndef FLATBUFFERS_GENERATED_ARMNNSCHEMA_ARMNNSERIALIZER_H_ #define FLATBUFFERS_GENERATED_ARMNNSCHEMA_ARMNNSERIALIZER_H_ @@ -349,6 +350,12 @@ struct StandInLayerBuilder; struct RankLayer; struct RankLayerBuilder; +struct ReduceLayer; +struct ReduceLayerBuilder; + +struct ReduceDescriptor; +struct ReduceDescriptorBuilder; + struct AnyLayer; struct AnyLayerBuilder; @@ -532,6 +539,42 @@ inline const char *EnumNameDataLayout(DataLayout e) { return EnumNamesDataLayout()[index]; } +enum ReduceOperation { + ReduceOperation_Sum = 0, + ReduceOperation_Max = 1, + ReduceOperation_Mean = 2, + ReduceOperation_Min = 3, + ReduceOperation_MIN = ReduceOperation_Sum, + ReduceOperation_MAX = ReduceOperation_Min +}; + +inline const ReduceOperation (&EnumValuesReduceOperation())[4] { + static const ReduceOperation values[] = { + ReduceOperation_Sum, + ReduceOperation_Max, + ReduceOperation_Mean, + ReduceOperation_Min + }; + return values; +} + +inline const char * const *EnumNamesReduceOperation() { + static const char * const names[5] = { + "Sum", + "Max", + "Mean", + "Min", + nullptr + }; + return names; +} + +inline const char *EnumNameReduceOperation(ReduceOperation e) { + if (flatbuffers::IsOutRange(e, ReduceOperation_Sum, ReduceOperation_Min)) return ""; + const size_t index = static_cast<size_t>(e); + return EnumNamesReduceOperation()[index]; +} + enum ResizeMethod { ResizeMethod_NearestNeighbor = 0, ResizeMethod_Bilinear = 1, @@ -685,11 +728,12 @@ enum LayerType { LayerType_Fill = 57, LayerType_Rank = 58, LayerType_LogicalBinary = 59, + LayerType_Reduce = 60, LayerType_MIN = LayerType_Addition, - 
LayerType_MAX = LayerType_LogicalBinary + LayerType_MAX = LayerType_Reduce }; -inline const LayerType (&EnumValuesLayerType())[60] { +inline const LayerType (&EnumValuesLayerType())[61] { static const LayerType values[] = { LayerType_Addition, LayerType_Input, @@ -750,13 +794,14 @@ inline const LayerType (&EnumValuesLayerType())[60] { LayerType_QLstm, LayerType_Fill, LayerType_Rank, - LayerType_LogicalBinary + LayerType_LogicalBinary, + LayerType_Reduce }; return values; } inline const char * const *EnumNamesLayerType() { - static const char * const names[61] = { + static const char * const names[62] = { "Addition", "Input", "Multiplication", @@ -817,13 +862,14 @@ inline const char * const *EnumNamesLayerType() { "Fill", "Rank", "LogicalBinary", + "Reduce", nullptr }; return names; } inline const char *EnumNameLayerType(LayerType e) { - if (flatbuffers::IsOutRange(e, LayerType_Addition, LayerType_LogicalBinary)) return ""; + if (flatbuffers::IsOutRange(e, LayerType_Addition, LayerType_Reduce)) return ""; const size_t index = static_cast<size_t>(e); return EnumNamesLayerType()[index]; } @@ -1157,11 +1203,12 @@ enum Layer { Layer_FillLayer = 58, Layer_RankLayer = 59, Layer_LogicalBinaryLayer = 60, + Layer_ReduceLayer = 61, Layer_MIN = Layer_NONE, - Layer_MAX = Layer_LogicalBinaryLayer + Layer_MAX = Layer_ReduceLayer }; -inline const Layer (&EnumValuesLayer())[61] { +inline const Layer (&EnumValuesLayer())[62] { static const Layer values[] = { Layer_NONE, Layer_ActivationLayer, @@ -1223,13 +1270,14 @@ inline const Layer (&EnumValuesLayer())[61] { Layer_QLstmLayer, Layer_FillLayer, Layer_RankLayer, - Layer_LogicalBinaryLayer + Layer_LogicalBinaryLayer, + Layer_ReduceLayer }; return values; } inline const char * const *EnumNamesLayer() { - static const char * const names[62] = { + static const char * const names[63] = { "NONE", "ActivationLayer", "AdditionLayer", @@ -1291,13 +1339,14 @@ inline const char * const *EnumNamesLayer() { "FillLayer", "RankLayer", 
"LogicalBinaryLayer", + "ReduceLayer", nullptr }; return names; } inline const char *EnumNameLayer(Layer e) { - if (flatbuffers::IsOutRange(e, Layer_NONE, Layer_LogicalBinaryLayer)) return ""; + if (flatbuffers::IsOutRange(e, Layer_NONE, Layer_ReduceLayer)) return ""; const size_t index = static_cast<size_t>(e); return EnumNamesLayer()[index]; } @@ -1546,6 +1595,10 @@ template<> struct LayerTraits<armnnSerializer::LogicalBinaryLayer> { static const Layer enum_value = Layer_LogicalBinaryLayer; }; +template<> struct LayerTraits<armnnSerializer::ReduceLayer> { + static const Layer enum_value = Layer_ReduceLayer; +}; + bool VerifyLayer(flatbuffers::Verifier &verifier, const void *obj, Layer type); bool VerifyLayerVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector<flatbuffers::Offset<void>> *values, const flatbuffers::Vector<uint8_t> *types); @@ -9097,6 +9150,160 @@ inline flatbuffers::Offset<RankLayer> CreateRankLayer( return builder_.Finish(); } +struct ReduceLayer FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ReduceLayerBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_BASE = 4, + VT_DESCRIPTOR = 6 + }; + const armnnSerializer::LayerBase *base() const { + return GetPointer<const armnnSerializer::LayerBase *>(VT_BASE); + } + const armnnSerializer::ReduceDescriptor *descriptor() const { + return GetPointer<const armnnSerializer::ReduceDescriptor *>(VT_DESCRIPTOR); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_BASE) && + verifier.VerifyTable(base()) && + VerifyOffset(verifier, VT_DESCRIPTOR) && + verifier.VerifyTable(descriptor()) && + verifier.EndTable(); + } +}; + +struct ReduceLayerBuilder { + typedef ReduceLayer Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_base(flatbuffers::Offset<armnnSerializer::LayerBase> base) { + fbb_.AddOffset(ReduceLayer::VT_BASE, base); + } + 
void add_descriptor(flatbuffers::Offset<armnnSerializer::ReduceDescriptor> descriptor) { + fbb_.AddOffset(ReduceLayer::VT_DESCRIPTOR, descriptor); + } + explicit ReduceLayerBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + ReduceLayerBuilder &operator=(const ReduceLayerBuilder &); + flatbuffers::Offset<ReduceLayer> Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset<ReduceLayer>(end); + return o; + } +}; + +inline flatbuffers::Offset<ReduceLayer> CreateReduceLayer( + flatbuffers::FlatBufferBuilder &_fbb, + flatbuffers::Offset<armnnSerializer::LayerBase> base = 0, + flatbuffers::Offset<armnnSerializer::ReduceDescriptor> descriptor = 0) { + ReduceLayerBuilder builder_(_fbb); + builder_.add_descriptor(descriptor); + builder_.add_base(base); + return builder_.Finish(); +} + +struct ReduceDescriptor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { + typedef ReduceDescriptorBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_TARGETHEIGHT = 4, + VT_TARGETWIDTH = 6, + VT_KEEPDIMS = 8, + VT_AXIS = 10, + VT_REDUCEOPERATION = 12 + }; + uint32_t targetHeight() const { + return GetField<uint32_t>(VT_TARGETHEIGHT, 0); + } + uint32_t targetWidth() const { + return GetField<uint32_t>(VT_TARGETWIDTH, 0); + } + bool keepDims() const { + return GetField<uint8_t>(VT_KEEPDIMS, 0) != 0; + } + const flatbuffers::Vector<uint32_t> *axis() const { + return GetPointer<const flatbuffers::Vector<uint32_t> *>(VT_AXIS); + } + armnnSerializer::ReduceOperation reduceOperation() const { + return static_cast<armnnSerializer::ReduceOperation>(GetField<int8_t>(VT_REDUCEOPERATION, 0)); + } + bool Verify(flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField<uint32_t>(verifier, VT_TARGETHEIGHT) && + VerifyField<uint32_t>(verifier, VT_TARGETWIDTH) && + VerifyField<uint8_t>(verifier, VT_KEEPDIMS) && + VerifyOffset(verifier, VT_AXIS) && + 
verifier.VerifyVector(axis()) && + VerifyField<int8_t>(verifier, VT_REDUCEOPERATION) && + verifier.EndTable(); + } +}; + +struct ReduceDescriptorBuilder { + typedef ReduceDescriptor Table; + flatbuffers::FlatBufferBuilder &fbb_; + flatbuffers::uoffset_t start_; + void add_targetHeight(uint32_t targetHeight) { + fbb_.AddElement<uint32_t>(ReduceDescriptor::VT_TARGETHEIGHT, targetHeight, 0); + } + void add_targetWidth(uint32_t targetWidth) { + fbb_.AddElement<uint32_t>(ReduceDescriptor::VT_TARGETWIDTH, targetWidth, 0); + } + void add_keepDims(bool keepDims) { + fbb_.AddElement<uint8_t>(ReduceDescriptor::VT_KEEPDIMS, static_cast<uint8_t>(keepDims), 0); + } + void add_axis(flatbuffers::Offset<flatbuffers::Vector<uint32_t>> axis) { + fbb_.AddOffset(ReduceDescriptor::VT_AXIS, axis); + } + void add_reduceOperation(armnnSerializer::ReduceOperation reduceOperation) { + fbb_.AddElement<int8_t>(ReduceDescriptor::VT_REDUCEOPERATION, static_cast<int8_t>(reduceOperation), 0); + } + explicit ReduceDescriptorBuilder(flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + ReduceDescriptorBuilder &operator=(const ReduceDescriptorBuilder &); + flatbuffers::Offset<ReduceDescriptor> Finish() { + const auto end = fbb_.EndTable(start_); + auto o = flatbuffers::Offset<ReduceDescriptor>(end); + return o; + } +}; + +inline flatbuffers::Offset<ReduceDescriptor> CreateReduceDescriptor( + flatbuffers::FlatBufferBuilder &_fbb, + uint32_t targetHeight = 0, + uint32_t targetWidth = 0, + bool keepDims = false, + flatbuffers::Offset<flatbuffers::Vector<uint32_t>> axis = 0, + armnnSerializer::ReduceOperation reduceOperation = armnnSerializer::ReduceOperation_Sum) { + ReduceDescriptorBuilder builder_(_fbb); + builder_.add_axis(axis); + builder_.add_targetWidth(targetWidth); + builder_.add_targetHeight(targetHeight); + builder_.add_reduceOperation(reduceOperation); + builder_.add_keepDims(keepDims); + return builder_.Finish(); +} + +inline 
flatbuffers::Offset<ReduceDescriptor> CreateReduceDescriptorDirect( + flatbuffers::FlatBufferBuilder &_fbb, + uint32_t targetHeight = 0, + uint32_t targetWidth = 0, + bool keepDims = false, + const std::vector<uint32_t> *axis = nullptr, + armnnSerializer::ReduceOperation reduceOperation = armnnSerializer::ReduceOperation_Sum) { + auto axis__ = axis ? _fbb.CreateVector<uint32_t>(*axis) : 0; + return armnnSerializer::CreateReduceDescriptor( + _fbb, + targetHeight, + targetWidth, + keepDims, + axis__, + reduceOperation); +} + struct AnyLayer FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { typedef AnyLayerBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { @@ -9290,6 +9497,9 @@ struct AnyLayer FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { const armnnSerializer::LogicalBinaryLayer *layer_as_LogicalBinaryLayer() const { return layer_type() == armnnSerializer::Layer_LogicalBinaryLayer ? static_cast<const armnnSerializer::LogicalBinaryLayer *>(layer()) : nullptr; } + const armnnSerializer::ReduceLayer *layer_as_ReduceLayer() const { + return layer_type() == armnnSerializer::Layer_ReduceLayer ? 
static_cast<const armnnSerializer::ReduceLayer *>(layer()) : nullptr; + } bool Verify(flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField<uint8_t>(verifier, VT_LAYER_TYPE) && @@ -9539,6 +9749,10 @@ template<> inline const armnnSerializer::LogicalBinaryLayer *AnyLayer::layer_as< return layer_as_LogicalBinaryLayer(); } +template<> inline const armnnSerializer::ReduceLayer *AnyLayer::layer_as<armnnSerializer::ReduceLayer>() const { + return layer_as_ReduceLayer(); +} + struct AnyLayerBuilder { typedef AnyLayer Table; flatbuffers::FlatBufferBuilder &fbb_; @@ -9989,6 +10203,10 @@ inline bool VerifyLayer(flatbuffers::Verifier &verifier, const void *obj, Layer auto ptr = reinterpret_cast<const armnnSerializer::LogicalBinaryLayer *>(obj); return verifier.VerifyTable(ptr); } + case Layer_ReduceLayer: { + auto ptr = reinterpret_cast<const armnnSerializer::ReduceLayer *>(obj); + return verifier.VerifyTable(ptr); + } default: return true; } } diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp index a0c99b9cca..a2217a3dc4 100644 --- a/src/armnnSerializer/Serializer.cpp +++ b/src/armnnSerializer/Serializer.cpp @@ -904,6 +904,25 @@ void SerializerVisitor::VisitRankLayer(const armnn::IConnectableLayer* layer, CreateAnyLayer(flatBufferRankLayer.o, serializer::Layer::Layer_RankLayer); } + +void SerializerVisitor::VisitReduceLayer(const armnn::IConnectableLayer* layer, + const armnn::ReduceDescriptor& reduceDescriptor, + const char*) +{ + auto fbReduceBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Reduce); + auto fbDescriptor = CreateReduceDescriptor(m_flatBufferBuilder, + reduceDescriptor.m_TargetHeight, + reduceDescriptor.m_TargetWidth, + reduceDescriptor.m_KeepDims, + m_flatBufferBuilder.CreateVector(reduceDescriptor.m_vAxis), + GetFlatBufferReduceOperation(reduceDescriptor.m_ReduceOperation)); + auto fbReduceLayer = serializer::CreateReduceLayer(m_flatBufferBuilder, + fbReduceBaseLayer, + 
fbDescriptor); + + CreateAnyLayer(fbReduceLayer.o, serializer::Layer::Layer_ReduceLayer); +} + // Build FlatBuffer for Reshape Layer void SerializerVisitor::VisitReshapeLayer(const armnn::IConnectableLayer* layer, const armnn::ReshapeDescriptor& reshapeDescriptor, diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp index f28be09036..10971fddc8 100644 --- a/src/armnnSerializer/Serializer.hpp +++ b/src/armnnSerializer/Serializer.hpp @@ -226,6 +226,10 @@ public: void VisitRankLayer(const armnn::IConnectableLayer* layer, const char* name = nullptr) override; + void VisitReduceLayer(const armnn::IConnectableLayer* layer, + const armnn::ReduceDescriptor& reduceDescriptor, + const char* name = nullptr) override; + void VisitReshapeLayer(const armnn::IConnectableLayer* layer, const armnn::ReshapeDescriptor& reshapeDescriptor, const char* name = nullptr) override; diff --git a/src/armnnSerializer/SerializerUtils.cpp b/src/armnnSerializer/SerializerUtils.cpp index 045d6aac5c..32ac75e024 100644 --- a/src/armnnSerializer/SerializerUtils.cpp +++ b/src/armnnSerializer/SerializerUtils.cpp @@ -198,4 +198,21 @@ armnnSerializer::ResizeMethod GetFlatBufferResizeMethod(armnn::ResizeMethod meth } } +armnnSerializer::ReduceOperation GetFlatBufferReduceOperation(armnn::ReduceOperation reduceOperation) +{ + switch (reduceOperation) + { + case armnn::ReduceOperation::Sum: + return armnnSerializer::ReduceOperation::ReduceOperation_Sum; + case armnn::ReduceOperation::Max: + return armnnSerializer::ReduceOperation::ReduceOperation_Max; + case armnn::ReduceOperation::Mean: + return armnnSerializer::ReduceOperation::ReduceOperation_Mean; + case armnn::ReduceOperation::Min: + return armnnSerializer::ReduceOperation::ReduceOperation_Min; + default: + return armnnSerializer::ReduceOperation::ReduceOperation_Sum; + } +} + } // namespace armnnSerializer diff --git a/src/armnnSerializer/SerializerUtils.hpp b/src/armnnSerializer/SerializerUtils.hpp index 
a3cf5ba3c1..55179864e8 100644 --- a/src/armnnSerializer/SerializerUtils.hpp +++ b/src/armnnSerializer/SerializerUtils.hpp @@ -38,4 +38,6 @@ armnnSerializer::ResizeMethod GetFlatBufferResizeMethod(armnn::ResizeMethod meth armnnSerializer::LogicalBinaryOperation GetFlatBufferLogicalBinaryOperation( armnn::LogicalBinaryOperation logicalBinaryOperation); +armnnSerializer::ReduceOperation GetFlatBufferReduceOperation(armnn::ReduceOperation reduceOperation); + } // namespace armnnSerializer diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp index 11177f5d04..44e8a3898e 100644 --- a/src/armnnSerializer/test/SerializerTests.cpp +++ b/src/armnnSerializer/test/SerializerTests.cpp @@ -2297,6 +2297,36 @@ BOOST_AUTO_TEST_CASE(SerializeRank) deserializedNetwork->Accept(verifier); } +BOOST_AUTO_TEST_CASE(SerializeReduceSum) +{ + DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Reduce) + + const std::string layerName("Reduce_Sum"); + const armnn::TensorInfo inputInfo({1, 1, 3, 2}, armnn::DataType::Float32); + const armnn::TensorInfo outputInfo({1, 1, 1, 2}, armnn::DataType::Float32); + + armnn::ReduceDescriptor descriptor; + descriptor.m_vAxis = { 2 }; + descriptor.m_ReduceOperation = armnn::ReduceOperation::Sum; + + armnn::INetworkPtr network = armnn::INetwork::Create(); + armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0); + armnn::IConnectableLayer* const reduceSumLayer = network->AddReduceLayer(descriptor, layerName.c_str()); + armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0); + + inputLayer->GetOutputSlot(0).Connect(reduceSumLayer->GetInputSlot(0)); + reduceSumLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0)); + + inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo); + reduceSumLayer->GetOutputSlot(0).SetTensorInfo(outputInfo); + + armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network)); + BOOST_CHECK(deserializedNetwork); + + 
ReduceLayerVerifier verifier(layerName, {inputInfo}, {outputInfo}, descriptor); + deserializedNetwork->Accept(verifier); +} + BOOST_AUTO_TEST_CASE(SerializeReshape) { DECLARE_LAYER_VERIFIER_CLASS_WITH_DESCRIPTOR(Reshape) |