21 #include <boost/filesystem.hpp> 22 #include <boost/format.hpp> 23 #include <boost/assert.hpp> 24 #include <boost/format.hpp> 25 #include <boost/format.hpp> 26 #include <boost/numeric/conversion/cast.hpp> 27 #include <boost/polymorphic_cast.hpp> 35 using namespace armnn;
44 const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
46 void CheckGraph(
const Deserializer::GraphPtr& graph,
47 unsigned int layersIndex,
50 if (graph->layers() ==
nullptr)
54 boost::format(
"%1% was called with invalid (null) graph. " 55 "Possible reason is that the graph is not yet loaded and Unpack(ed). " 56 "layers:%2% at %3%") %
61 else if (layersIndex >= graph->layers()->size())
65 boost::format(
"%1% was called with an invalid layers index. " 66 "layers:%2% at %3%") %
73 void CheckLayers(
const Deserializer::GraphPtr& graph,
74 unsigned int layersIndex,
75 unsigned int layerIndex,
78 if (graph->layers() ==
nullptr)
82 boost::format(
"%1% was called with invalid (null) graph. " 83 "Possible reason is that the graph is not yet loaded and Unpack(ed). " 84 "layers:%2% at %3%") %
89 else if (layersIndex >= graph->layers()->size())
93 boost::format(
"%1% was called with an invalid layers index. " 94 "layers:%2% at %3%") %
99 else if (layerIndex >= graph->layers()[layersIndex].size()
100 && layerIndex != VIRTUAL_LAYER_ID)
104 boost::format(
"%1% was called with an invalid layer index. " 105 "layers:%2% layer:%3% at %4%") %
116 if (rawPtr ==
nullptr)
120 boost::format(
"%1% was called with a null tensor pointer. " 128 void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
131 if (rawPtr ==
nullptr)
133 throw ParseException(boost::str(boost::format(
"%1% was called with a null const tensor pointer. at %2%") %
139 void CheckConstTensorSize(
const unsigned int constTensorSize,
140 const unsigned int tensorSize,
143 if (constTensorSize != tensorSize)
145 throw ParseException(boost::str(boost::format(
"%1% wrong number of components supplied to tensor. at:%2%") %
151 #define CHECK_TENSOR_PTR(TENSOR_PTR) \ 152 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION()) 154 #define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \ 155 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION()) 157 #define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \ 158 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION()) 160 #define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \ 161 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION()) 163 #define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \ 164 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION()) 170 if (actualSize != expected.size())
175 for (
unsigned int i = 0u; i < actualSize; i++)
177 if (actual[i] != static_cast<unsigned int>(expected[i]))
186 Deserializer::Deserializer()
187 : m_Network(nullptr, nullptr),
189 m_ParserFunctions(Layer_MAX+1, &
Deserializer::ParseUnsupportedLayer)
192 m_ParserFunctions[Layer_AbsLayer] = &Deserializer::ParseAbs;
193 m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
194 m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
195 m_ParserFunctions[Layer_ArgMinMaxLayer] = &Deserializer::ParseArgMinMax;
196 m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
197 m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
198 m_ParserFunctions[Layer_ComparisonLayer] = &Deserializer::ParseComparison;
199 m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
200 m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
201 m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
202 m_ParserFunctions[Layer_DepthToSpaceLayer] = &Deserializer::ParseDepthToSpace;
203 m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
204 m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
205 m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
206 m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
207 m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &Deserializer::ParseElementwiseUnary;
208 m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
209 m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
210 m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
211 m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
212 m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
213 m_ParserFunctions[Layer_InstanceNormalizationLayer] = &Deserializer::ParseInstanceNormalization;
214 m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
215 m_ParserFunctions[Layer_LogSoftmaxLayer] = &Deserializer::ParseLogSoftmax;
216 m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
217 m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
218 m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
219 m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
220 m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
221 m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
222 m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
223 m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
224 m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
225 m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
226 m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
227 m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
228 m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
229 m_ParserFunctions[Layer_QuantizedLstmLayer] = &Deserializer::ParseQuantizedLstm;
230 m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
231 m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
232 m_ParserFunctions[Layer_ResizeLayer] = &Deserializer::ParseResize;
233 m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
234 m_ParserFunctions[Layer_SliceLayer] = &Deserializer::ParseSlice;
235 m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
236 m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
237 m_ParserFunctions[Layer_SpaceToDepthLayer] = &Deserializer::ParseSpaceToDepth;
238 m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
239 m_ParserFunctions[Layer_StackLayer] = &Deserializer::ParseStack;
240 m_ParserFunctions[Layer_StandInLayer] = &Deserializer::ParseStandIn;
241 m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
242 m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
243 m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
244 m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
245 m_ParserFunctions[Layer_TransposeLayer] = &Deserializer::ParseTranspose;
250 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
254 case Layer::Layer_AbsLayer:
255 return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
256 case Layer::Layer_ActivationLayer:
257 return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
258 case Layer::Layer_AdditionLayer:
259 return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
260 case Layer::Layer_ArgMinMaxLayer:
261 return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
262 case Layer::Layer_BatchToSpaceNdLayer:
263 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
264 case Layer::Layer_BatchNormalizationLayer:
265 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
266 case Layer::Layer_ComparisonLayer:
267 return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
268 case Layer::Layer_ConcatLayer:
269 return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
270 case Layer::Layer_ConstantLayer:
271 return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
272 case Layer::Layer_Convolution2dLayer:
273 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
274 case Layer::Layer_DepthToSpaceLayer:
275 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
276 case Layer::Layer_DepthwiseConvolution2dLayer:
277 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
278 case Layer::Layer_DequantizeLayer:
279 return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
280 case Layer::Layer_DetectionPostProcessLayer:
281 return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
282 case Layer::Layer_DivisionLayer:
283 return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
284 case Layer::Layer_EqualLayer:
285 return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
286 case Layer::Layer_FullyConnectedLayer:
287 return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
288 case Layer::Layer_FloorLayer:
289 return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
290 case Layer::Layer_GatherLayer:
291 return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
292 case Layer::Layer_GreaterLayer:
293 return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
294 case Layer::Layer_InputLayer:
295 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
296 case Layer::Layer_InstanceNormalizationLayer:
297 return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
298 case Layer::Layer_L2NormalizationLayer:
299 return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
300 case Layer::Layer_LogSoftmaxLayer:
301 return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
302 case Layer::Layer_LstmLayer:
303 return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
304 case Layer::Layer_MeanLayer:
305 return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
306 case Layer::Layer_MinimumLayer:
307 return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
308 case Layer::Layer_MaximumLayer:
309 return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
310 case Layer::Layer_MergeLayer:
311 return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
312 case Layer::Layer_MergerLayer:
313 return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
314 case Layer::Layer_MultiplicationLayer:
315 return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
316 case Layer::Layer_NormalizationLayer:
317 return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
318 case Layer::Layer_OutputLayer:
319 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
320 case Layer::Layer_PadLayer:
321 return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
322 case Layer::Layer_PermuteLayer:
323 return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
324 case Layer::Layer_Pooling2dLayer:
325 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
326 case Layer::Layer_PreluLayer:
327 return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
328 case Layer::Layer_QuantizeLayer:
329 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
330 case Layer::Layer_QuantizedLstmLayer:
331 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
332 case Layer::Layer_ReshapeLayer:
333 return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
334 case Layer::Layer_ResizeBilinearLayer:
335 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
336 case Layer::Layer_ResizeLayer:
337 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
338 case Layer::Layer_RsqrtLayer:
339 return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
340 case Layer::Layer_SliceLayer:
341 return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
342 case Layer::Layer_SoftmaxLayer:
343 return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
344 case Layer::Layer_SpaceToBatchNdLayer:
345 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
346 case Layer::Layer_SpaceToDepthLayer:
347 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
348 case Layer::Layer_SplitterLayer:
349 return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
350 case Layer::Layer_StackLayer:
351 return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
352 case Layer::Layer_StandInLayer:
353 return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
354 case Layer::Layer_StridedSliceLayer:
355 return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
356 case Layer::Layer_SubtractionLayer:
357 return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
358 case Layer::Layer_SwitchLayer:
359 return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
360 case Layer::Layer_TransposeConvolution2dLayer:
361 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
362 case Layer::Layer_TransposeLayer:
363 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
364 case Layer::Layer_NONE:
367 boost::format(
"Layer type %1% not recognized") %
376 return layer->layerName()->str();
381 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
383 if (layerType == Layer::Layer_InputLayer)
385 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
387 else if ( layerType == Layer::Layer_OutputLayer )
389 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
398 case armnnSerializer::DataLayout::DataLayout_NHWC:
400 case armnnSerializer::DataLayout::DataLayout_NCHW:
410 case armnnSerializer::ActivationFunction_Sigmoid:
412 case armnnSerializer::ActivationFunction_TanH:
414 case armnnSerializer::ActivationFunction_Linear:
416 case armnnSerializer::ActivationFunction_ReLu:
418 case armnnSerializer::ActivationFunction_BoundedReLu:
420 case armnnSerializer::ActivationFunction_LeakyReLu:
422 case armnnSerializer::ActivationFunction_Abs:
424 case armnnSerializer::ActivationFunction_Sqrt:
426 case armnnSerializer::ActivationFunction_Square:
428 case armnnSerializer::ActivationFunction_Elu:
430 case armnnSerializer::ActivationFunction_HardSwish:
441 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
443 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
453 case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
455 case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
457 case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
459 case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
461 case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
463 case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
473 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
475 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
477 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
479 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
481 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
492 case armnnSerializer::ResizeMethod_NearestNeighbor:
494 case armnnSerializer::ResizeMethod_Bilinear:
506 switch (tensorPtr->dataType())
508 case DataType_QAsymmS8:
511 case DataType_QuantisedAsymm8:
512 case DataType_QAsymmU8:
515 case DataType_QSymmS16:
516 case DataType_QuantisedSymm16:
519 case DataType_Signed32:
522 case DataType_Float32:
525 case DataType_Float16:
528 case DataType_Boolean:
536 boost::format(
"Unsupported data type %1% = %2%. %3%") %
537 tensorPtr->dataType() %
538 EnumNameDataType(tensorPtr->dataType()) %
542 float quantizationScale = tensorPtr->quantizationScale();
543 int32_t quantizationOffset = tensorPtr->quantizationOffset();
545 auto dimensions = tensorPtr->dimensions();
546 unsigned int size = dimensions->size();
547 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
563 switch (constTensorPtr->data_type())
565 case ConstTensorData_ByteData:
567 auto byteData = constTensorPtr->data_as_ByteData()->data();
571 case ConstTensorData_ShortData:
573 auto shortData = constTensorPtr->data_as_ShortData()->data();
577 case ConstTensorData_IntData:
579 auto intData = constTensorPtr->data_as_IntData()->data();
583 case ConstTensorData_LongData:
585 auto longData = constTensorPtr->data_as_LongData()->data();
593 boost::str(boost::format(
"Unsupported data type %1% = %2%. %3%") %
594 constTensorPtr->data_type() %
595 EnumNameConstTensorData(constTensorPtr->data_type()) %
602 unsigned int layerIndex)
606 const auto& numInputs = layer->inputSlots()->size();
610 for (
unsigned int i=0; i<numInputs; ++i)
613 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
614 result[i] =
GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
620 unsigned int layerIndex)
624 const auto& numOutputs = layer->outputSlots()->size();
628 for (
unsigned int i=0; i<numOutputs; ++i)
630 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
635 void Deserializer::ParseUnsupportedLayer(
GraphPtr graph,
unsigned int layerIndex)
638 const auto layerName =
GetBaseLayer(graph, layerIndex)->layerName()->c_str();
641 boost::format(
"Layer not supported. " 643 "layerName: %2% / %3%") %
649 void Deserializer::ResetParser()
652 m_InputBindings.clear();
653 m_OutputBindings.clear();
675 return CreateNetworkFromGraph(graph);
681 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
683 return CreateNetworkFromGraph(graph);
688 if (binaryContent ==
nullptr)
693 flatbuffers::Verifier verifier(binaryContent, len);
694 if (verifier.VerifyBuffer<SerializedGraph>() ==
false)
697 boost::str(boost::format(
"Buffer doesn't conform to the expected Armnn " 698 "flatbuffers format. size:%1% %2%") %
702 return GetSerializedGraph(binaryContent);
707 m_Network = INetwork::Create();
708 BOOST_ASSERT(graph !=
nullptr);
709 unsigned int layerIndex = 0;
710 for (AnyLayer
const* layer : *graph->layers())
712 if (layer->layer_type() != Layer_InputLayer &&
713 layer->layer_type() != Layer_OutputLayer)
716 auto& parserFunction = m_ParserFunctions[layer->layer_type()];
717 (this->*parserFunction)(graph, layerIndex);
722 SetupInputLayers(graph);
723 SetupOutputLayers(graph);
726 for (
auto&& graphIt : m_GraphConnections)
728 Connections& connections = graphIt.second;
729 for (
auto&& outputIt : connections.outputSlots)
731 const unsigned int outputSlotIndex = outputIt.first;
733 if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
735 for (
IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
737 outputSlot->
Connect(*inputSlot);
743 return std::move(m_Network);
747 const std::string& name)
const 750 for (
auto inputBinding : m_InputBindings)
752 if (inputBinding.first == name)
754 return inputBinding.second;
759 boost::format(
"No input binding found for layer:%1% / %2%") %
765 const std::string& name)
const 768 for (
auto outputBinding : m_OutputBindings)
770 if (outputBinding.first == name)
772 return outputBinding.second;
777 boost::format(
"No output binding found for layer:%1% / %2%") %
782 unsigned int Deserializer::GetInputLayerInVector(
GraphPtr graph,
int targetId)
784 for (
unsigned int i = 0; i < graph->layers()->size(); i++)
786 auto layer = graph->layers()->Get(i);
787 if (layer->layer_type() == Layer::Layer_InputLayer)
789 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
790 if (layerBindingId == targetId)
796 throw ParseException(
"Input layer with given layerBindingId not found");
799 unsigned int Deserializer::GetOutputLayerInVector(
GraphPtr graph,
int targetId)
801 for (
unsigned int i = 0; i < graph->layers()->size(); i++)
803 auto layer = graph->layers()->Get(i);
804 if (layer->layer_type() == Layer::Layer_OutputLayer)
806 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
807 if (layerBindingId == targetId)
813 throw ParseException(
"Output layer with given layerBindingId not found");
816 unsigned int Deserializer::GetLayerIndexInVector(
GraphPtr graph,
unsigned int targetIndex)
818 for (
unsigned int i = 0; i < graph->layers()->size(); i++)
821 if (layer->index() == targetIndex)
829 Deserializer::FeatureVersions Deserializer::GetFeatureVersions(
GraphPtr graph)
831 Deserializer::FeatureVersions versions;
833 if (graph->featureVersions())
835 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
841 void Deserializer::SetupInputLayers(
GraphPtr graph)
844 const unsigned int numInputs = graph->inputIds()->size();
845 m_InputBindings.clear();
846 m_InputBindings.reserve(numInputs);
848 for (
unsigned int i = 0; i < numInputs; i++)
850 unsigned int inputLayerIndex = 0xFFFFFFFF;
851 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
854 inputLayerIndex = GetLayerIndexInVector(graph, inputId);
858 const int inputId = graph->inputIds()->Get(i);
859 inputLayerIndex = GetInputLayerInVector(graph, inputId);
866 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(),
"Input has no name.");
869 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
872 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
873 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
876 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
880 void Deserializer::SetupOutputLayers(
GraphPtr graph)
883 const unsigned int numOutputs = graph->outputIds()->size();
884 m_OutputBindings.clear();
885 m_OutputBindings.reserve(numOutputs);
887 for (
unsigned int i = 0; i < numOutputs; i++)
889 unsigned int outputLayerIndex = 0xFFFFFFFF;
890 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
892 const unsigned int outputId =
boost::numeric_cast<
unsigned int>(graph->outputIds()->Get(i));
893 outputLayerIndex = GetLayerIndexInVector(graph, outputId);
897 const int outputId = graph->outputIds()->Get(i);
898 outputLayerIndex = GetOutputLayerInVector(graph, outputId);
905 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(),
"Input has no name.");
908 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
910 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
912 unsigned int sourceLayerIndex =
913 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
918 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
922 void Deserializer::RegisterOutputSlots(
GraphPtr graph,
927 BOOST_ASSERT(layer !=
nullptr);
932 boost::str(boost::format(
"The number of outputslots (%1%) does not match the number expected (%2%)" 933 " for layer index: %3% %4%") %
934 baseLayer->outputSlots()->size() %
942 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
945 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
949 void Deserializer::RegisterInputSlots(
GraphPtr graph,
954 BOOST_ASSERT(layer !=
nullptr);
959 boost::str(boost::format(
"The number of inputslots (%1%) does not match the number expected (%2%)" 960 " for layer index:%3% %4%") %
961 baseLayer->inputSlots()->size() %
969 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
970 auto fbConnection = fbInputSlot->connection();
972 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
976 void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
977 uint32_t outputSlotIndex,
980 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
982 m_GraphConnections[sourceLayerIndex] = Connections();
985 Connections& connections = m_GraphConnections[sourceLayerIndex];
986 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
988 connections.inputSlots[outputSlotIndex] = {inputSlot};
992 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
996 void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
997 uint32_t outputSlotIndex,
1000 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1002 m_GraphConnections[sourceLayerIndex] = Connections();
1005 Connections& connections = m_GraphConnections[sourceLayerIndex];
1006 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1011 connections.outputSlots[outputSlotIndex] = outputSlot;
1017 auto inputs =
GetInputs(graph, layerIndex);
1021 auto outputs =
GetOutputs(graph, layerIndex);
1027 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1031 RegisterInputSlots(graph, layerIndex, layer);
1032 RegisterOutputSlots(graph, layerIndex, layer);
1035 void Deserializer::ParseActivation(
GraphPtr graph,
unsigned int layerIndex)
1038 auto inputs =
GetInputs(graph, layerIndex);
1042 auto outputs =
GetOutputs(graph, layerIndex);
1045 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
1047 auto serializerDescriptor = serializerLayer->descriptor();
1051 descriptor.
m_A = serializerDescriptor->a();
1052 descriptor.
m_B = serializerDescriptor->b();
1059 RegisterInputSlots(graph, layerIndex, layer);
1060 RegisterOutputSlots(graph, layerIndex, layer);
1063 void Deserializer::ParseAdd(
GraphPtr graph,
unsigned int layerIndex)
1066 auto inputs =
GetInputs(graph, layerIndex);
1070 auto outputs =
GetOutputs(graph, layerIndex);
1079 RegisterInputSlots(graph, layerIndex, layer);
1080 RegisterOutputSlots(graph, layerIndex, layer);
1083 void Deserializer::ParseArgMinMax(
GraphPtr graph,
unsigned int layerIndex)
1086 auto inputs =
GetInputs(graph, layerIndex);
1090 auto outputs =
GetOutputs(graph, layerIndex);
1093 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1094 auto serializerDescriptor = serializerLayer->descriptor();
1098 descriptor.
m_Axis = serializerDescriptor->axis();
1100 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1105 RegisterInputSlots(graph, layerIndex, layer);
1106 RegisterOutputSlots(graph, layerIndex, layer);
1109 void Deserializer::ParseBatchToSpaceNd(
GraphPtr graph,
unsigned int layerIndex)
1119 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1120 auto flatBufferCrops = flatBufferDescriptor->crops();
1121 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1123 if (flatBufferCrops->Length() % 2 != 0)
1126 boost::format(
"The size of crops must be divisible by 2 %1%") %
CHECK_LOCATION().AsString()));
1129 std::vector<std::pair<unsigned int, unsigned int>> crops;
1130 crops.reserve(flatBufferCrops->Length() / 2);
1131 for (
unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1133 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1139 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1143 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1148 RegisterInputSlots(graph, layerIndex, layer);
1149 RegisterOutputSlots(graph, layerIndex, layer);
1152 void Deserializer::ParseBatchNormalization(
GraphPtr graph,
unsigned int layerIndex)
1156 auto inputs =
GetInputs(graph, layerIndex);
1159 auto outputs =
GetOutputs(graph, layerIndex);
1165 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1166 auto serializerDescriptor = serializerLayer->descriptor();
1169 descriptor.
m_Eps = serializerDescriptor->eps();
1183 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1185 RegisterInputSlots(graph, layerIndex, layer);
1186 RegisterOutputSlots(graph, layerIndex, layer);
1189 void Deserializer::ParseConstant(
GraphPtr graph,
unsigned int layerIndex)
1194 auto outputs =
GetOutputs(graph, layerIndex);
1199 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1200 auto serializerInput = serializerLayer->input();
1204 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1209 RegisterOutputSlots(graph, layerIndex, layer);
1212 void Deserializer::ParseConvolution2d(
GraphPtr graph,
unsigned int layerIndex)
1215 auto inputs =
GetInputs(graph, layerIndex);
1219 auto outputs =
GetOutputs(graph, layerIndex);
1222 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
1224 auto serializerDescriptor = serializerLayer->descriptor();
1227 descriptor.
m_PadLeft = serializerDescriptor->padLeft();
1228 descriptor.
m_PadRight = serializerDescriptor->padRight();
1229 descriptor.
m_PadTop = serializerDescriptor->padTop();
1230 descriptor.
m_PadBottom = serializerDescriptor->padBottom();
1231 descriptor.
m_StrideX = serializerDescriptor->strideX();
1232 descriptor.
m_StrideY = serializerDescriptor->strideY();;
1233 descriptor.
m_DilationX = serializerDescriptor->dilationX();
1234 descriptor.
m_DilationY = serializerDescriptor->dilationY();;
1235 descriptor.
m_BiasEnabled = serializerDescriptor->biasEnabled();;
1252 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1254 RegisterInputSlots(graph, layerIndex, layer);
1255 RegisterOutputSlots(graph, layerIndex, layer);
1258 void Deserializer::ParseDepthToSpace(
GraphPtr graph,
unsigned int layerIndex)
1262 auto inputs =
GetInputs(graph, layerIndex);
1265 auto outputs =
GetOutputs(graph, layerIndex);
1268 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1271 descriptor.
m_BlockSize = fbDescriptor->blockSize();
1275 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1280 RegisterInputSlots(graph, layerIndex, layer);
1281 RegisterOutputSlots(graph, layerIndex, layer);
1284 void Deserializer::ParseDepthwiseConvolution2d(
GraphPtr graph,
unsigned int layerIndex)
1287 auto inputs =
GetInputs(graph, layerIndex);
1291 auto outputs =
GetOutputs(graph, layerIndex);
1294 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
1296 auto serializerDescriptor = serializerLayer->descriptor();
1299 descriptor.
m_PadLeft = serializerDescriptor->padLeft();
1300 descriptor.
m_PadRight = serializerDescriptor->padRight();
1301 descriptor.
m_PadTop = serializerDescriptor->padTop();
1302 descriptor.
m_PadBottom = serializerDescriptor->padBottom();
1303 descriptor.
m_StrideX = serializerDescriptor->strideX();
1304 descriptor.
m_StrideY = serializerDescriptor->strideY();
1305 descriptor.
m_DilationX = serializerDescriptor->dilationX();
1306 descriptor.
m_DilationY = serializerDescriptor->dilationY();
1307 descriptor.
m_BiasEnabled = serializerDescriptor->biasEnabled();;
1319 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1325 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1327 RegisterInputSlots(graph, layerIndex, layer);
1328 RegisterOutputSlots(graph, layerIndex, layer);
1331 void Deserializer::ParseDetectionPostProcess(
GraphPtr graph,
unsigned int layerIndex)
1334 auto inputs =
GetInputs(graph, layerIndex);
1338 auto outputs =
GetOutputs(graph, layerIndex);
1341 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1343 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1351 descriptor.
m_NumClasses = flatBufferDescriptor->numClasses();
1353 descriptor.
m_ScaleX = flatBufferDescriptor->scaleX();
1354 descriptor.
m_ScaleY = flatBufferDescriptor->scaleY();
1355 descriptor.
m_ScaleW = flatBufferDescriptor->scaleW();
1356 descriptor.
m_ScaleH = flatBufferDescriptor->scaleH();
1364 for (
unsigned int i = 0; i < 4; i++)
1366 layer->GetOutputSlot(i).SetTensorInfo(
ToTensorInfo(outputs[i]));
1369 RegisterInputSlots(graph, layerIndex, layer);
1370 RegisterOutputSlots(graph, layerIndex, layer);
1373 void Deserializer::ParseDivision(
GraphPtr graph,
unsigned int layerIndex)
1376 auto inputs =
GetInputs(graph, layerIndex);
1380 auto outputs =
GetOutputs(graph, layerIndex);
1389 RegisterInputSlots(graph, layerIndex, layer);
1390 RegisterOutputSlots(graph, layerIndex, layer);
1393 void Deserializer::ParseEqual(
GraphPtr graph,
unsigned int layerIndex)
1396 auto inputs =
GetInputs(graph, layerIndex);
1400 auto outputs =
GetOutputs(graph, layerIndex);
1405 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1410 RegisterInputSlots(graph, layerIndex, layer);
1411 RegisterOutputSlots(graph, layerIndex, layer);
1414 void Deserializer::ParseGreater(
GraphPtr graph,
unsigned int layerIndex)
1417 auto inputs =
GetInputs(graph, layerIndex);
1421 auto outputs =
GetOutputs(graph, layerIndex);
1426 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1431 RegisterInputSlots(graph, layerIndex, layer);
1432 RegisterOutputSlots(graph, layerIndex, layer);
1435 void Deserializer::ParseInstanceNormalization(
GraphPtr graph,
unsigned int layerIndex)
1439 auto inputs =
GetInputs(graph, layerIndex);
1442 auto outputs =
GetOutputs(graph, layerIndex);
1445 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1446 auto fbDescriptor = fbLayer->descriptor();
1449 descriptor.
m_Gamma = fbDescriptor->gamma();
1450 descriptor.
m_Beta = fbDescriptor->beta();
1451 descriptor.
m_Eps = fbDescriptor->eps();
1454 const std::string layerName =
GetLayerName(graph, layerIndex);
1457 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1460 RegisterInputSlots(graph, layerIndex, layer);
1461 RegisterOutputSlots(graph, layerIndex, layer);
1464 void Deserializer::ParseL2Normalization(
GraphPtr graph,
unsigned int layerIndex)
1468 auto inputs =
GetInputs(graph, layerIndex);
1471 auto outputs =
GetOutputs(graph, layerIndex);
1475 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1476 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1481 descriptor.
m_Eps = flatBufferDescriptor->eps();
1483 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1486 RegisterInputSlots(graph, layerIndex, layer);
1487 RegisterOutputSlots(graph, layerIndex, layer);
1490 void Deserializer::ParseLogSoftmax(
GraphPtr graph,
unsigned int layerIndex)
1501 descriptor.
m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1502 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1505 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1510 RegisterInputSlots(graph, layerIndex, layer);
1511 RegisterOutputSlots(graph, layerIndex, layer);
1514 void Deserializer::ParseMinimum(
GraphPtr graph,
unsigned int layerIndex)
1517 auto inputs =
GetInputs(graph, layerIndex);
1521 auto outputs =
GetOutputs(graph, layerIndex);
1530 RegisterInputSlots(graph, layerIndex, layer);
1531 RegisterOutputSlots(graph, layerIndex, layer);
1534 void Deserializer::ParseMaximum(
GraphPtr graph,
unsigned int layerIndex)
1537 auto inputs =
GetInputs(graph, layerIndex);
1541 auto outputs =
GetOutputs(graph, layerIndex);
1550 RegisterInputSlots(graph, layerIndex, layer);
1551 RegisterOutputSlots(graph, layerIndex, layer);
1555 unsigned int layerIndex)
1557 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1561 case Layer::Layer_ConcatLayer:
1562 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1563 case Layer::Layer_MergerLayer:
1564 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1570 void Deserializer::ParseComparison(
GraphPtr graph,
unsigned int layerIndex)
1575 auto inputs =
GetInputs(graph, layerIndex);
1578 auto outputs =
GetOutputs(graph, layerIndex);
1581 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1582 auto fbDescriptor = fbLayer->descriptor();
1587 const std::string& layerName =
GetLayerName(graph, layerIndex);
1588 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1593 RegisterInputSlots(graph, layerIndex, layer);
1594 RegisterOutputSlots(graph, layerIndex, layer);
1597 void Deserializer::ParseElementwiseUnary(
GraphPtr graph,
unsigned int layerIndex)
1602 auto inputs =
GetInputs(graph, layerIndex);
1605 auto outputs =
GetOutputs(graph, layerIndex);
1608 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1609 auto fbDescriptor = fbLayer->descriptor();
1614 const std::string& layerName =
GetLayerName(graph, layerIndex);
1615 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1620 RegisterInputSlots(graph, layerIndex, layer);
1621 RegisterOutputSlots(graph, layerIndex, layer);
1624 void Deserializer::ParseConcat(
GraphPtr graph,
unsigned int layerIndex)
1629 auto outputs =
GetOutputs(graph, layerIndex);
1634 unsigned int numViews = originsDescriptor->numViews();
1635 unsigned int numDimensions = originsDescriptor->numDimensions();
1638 auto inputs =
GetInputs(graph, layerIndex);
1642 auto originsPtr = originsDescriptor->viewOrigins();
1643 for (
unsigned int v = 0; v < numViews; ++v)
1645 auto originPtr = originsPtr->Get(v);
1646 for (
unsigned int d = 0; d < numDimensions; ++d)
1648 uint32_t value = originPtr->data()->Get(d);
1649 descriptor.SetViewOriginCoord(v, d, value);
1652 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
1654 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
1658 RegisterInputSlots(graph, layerIndex, layer);
1659 RegisterOutputSlots(graph, layerIndex, layer);
1662 void Deserializer::ParseMultiplication(
GraphPtr graph,
unsigned int layerIndex)
1665 auto inputs =
GetInputs(graph, layerIndex);
1669 auto outputs =
GetOutputs(graph, layerIndex);
1673 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
1678 RegisterInputSlots(graph, layerIndex, layer);
1679 RegisterOutputSlots(graph, layerIndex, layer);
1682 void Deserializer::ParseFloor(
GraphPtr graph,
unsigned int layerIndex)
1687 auto inputs =
GetInputs(graph, layerIndex);
1690 auto outputs =
GetOutputs(graph, layerIndex);
1697 layer = m_Network->AddFloorLayer(layerName.c_str());
1702 RegisterInputSlots(graph, layerIndex, layer);
1703 RegisterOutputSlots(graph, layerIndex, layer);
1706 void Deserializer::ParseFullyConnected(
GraphPtr graph,
unsigned int layerIndex)
1709 auto inputs =
GetInputs(graph, layerIndex);
1713 auto outputs =
GetOutputs(graph, layerIndex);
1716 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
1718 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1721 fullyConnectedDescriptor.
m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1728 if (flatBufferDescriptor->biasEnabled())
1733 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1739 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1741 RegisterInputSlots(graph, layerIndex, layer);
1742 RegisterOutputSlots(graph, layerIndex, layer);
1745 void Deserializer::ParsePad(
GraphPtr graph,
unsigned int layerIndex)
1755 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1756 auto flatBufferPadList = flatBufferDescriptor->padList();
1757 float padValue = flatBufferDescriptor->padValue();
1759 if (flatBufferPadList->Length() % 2 != 0)
1762 boost::format(
"The size of the pad list must be divisible by 2 %1%") %
CHECK_LOCATION().AsString()));
1765 std::vector<std::pair<unsigned int, unsigned int>> padList;
1766 padList.reserve(flatBufferPadList->Length() / 2);
1767 for (
unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1769 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1775 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1780 RegisterInputSlots(graph, layerIndex, layer);
1781 RegisterOutputSlots(graph, layerIndex, layer);
1784 void Deserializer::ParsePermute(
GraphPtr graph,
unsigned int layerIndex)
1789 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
1791 auto inputs =
GetInputs(graph, layerIndex);
1794 auto outputs =
GetOutputs(graph, layerIndex);
1801 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
1804 RegisterInputSlots(graph, layerIndex, layer);
1805 RegisterOutputSlots(graph, layerIndex, layer);
1809 unsigned int layerIndex)
1814 switch (pooling2dDesc->poolType())
1816 case PoolingAlgorithm_Average:
1821 case PoolingAlgorithm_Max:
1828 BOOST_ASSERT_MSG(
false,
"Unsupported pooling algorithm");
1832 switch (pooling2dDesc->outputShapeRounding())
1834 case OutputShapeRounding_Floor:
1839 case OutputShapeRounding_Ceiling:
1846 BOOST_ASSERT_MSG(
false,
"Unsupported output shape rounding");
1850 switch (pooling2dDesc->paddingMethod())
1852 case PaddingMethod_Exclude:
1857 case PaddingMethod_IgnoreValue:
1864 BOOST_ASSERT_MSG(
false,
"Unsupported padding method");
1868 switch (pooling2dDesc->dataLayout())
1870 case DataLayout_NCHW:
1875 case DataLayout_NHWC:
1882 BOOST_ASSERT_MSG(
false,
"Unsupported data layout");
1887 desc.
m_PadLeft = pooling2dDesc->padLeft();
1889 desc.
m_PadTop = pooling2dDesc->padTop();
1890 desc.
m_StrideX = pooling2dDesc->strideX();
1891 desc.
m_StrideY = pooling2dDesc->strideY();
1898 void Deserializer::ParsePooling2d(
GraphPtr graph,
unsigned int layerIndex)
1902 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
1903 auto inputs =
GetInputs(graph, layerIndex);
1906 auto outputs =
GetOutputs(graph, layerIndex);
1912 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
1915 RegisterInputSlots(graph, layerIndex, layer);
1916 RegisterOutputSlots(graph, layerIndex, layer);
1919 void Deserializer::ParseQuantize(
GraphPtr graph,
unsigned int layerIndex)
1923 auto inputs =
GetInputs(graph, layerIndex);
1926 auto outputs =
GetOutputs(graph, layerIndex);
1934 RegisterInputSlots(graph, layerIndex, layer);
1935 RegisterOutputSlots(graph, layerIndex, layer);
1939 const std::vector<uint32_t>& targetDimsIn)
1941 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1942 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1944 if (stretchDim != targetDimsIn.end())
1946 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1949 boost::format(
"At most one component of shape can be -1 %1%") %
CHECK_LOCATION().AsString()));
1952 auto targetNumElements =
1954 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1956 auto stretchIndex =
static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1957 outputDims[stretchIndex] = inputTensorInfo.
GetNumElements() / targetNumElements;
1968 void Deserializer::ParseReshape(
GraphPtr graph,
unsigned int layerIndex)
1971 auto inputs =
GetInputs(graph, layerIndex);
1973 auto outputs =
GetOutputs(graph, layerIndex);
1979 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
1980 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
1983 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
1985 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
1986 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
1988 if (inputs.size() > 1 && !
CheckShape(reshapeOutputTensorShape, expectedDims))
1990 std::stringstream ss;
1991 ss <<
"New shape defined in reshape parameters " 1992 << reshapeOutputTensorShape
1993 <<
" does not equal output shape " 1994 << actualOutputTensorInfo.
GetShape()
2004 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2007 RegisterInputSlots(graph, layerIndex, layer);
2008 RegisterOutputSlots(graph, layerIndex, layer);
2011 void Deserializer::ParseResize(
GraphPtr graph,
unsigned int layerIndex)
2021 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2024 descriptor.
m_TargetWidth = flatBufferDescriptor->targetWidth();
2025 descriptor.
m_TargetHeight = flatBufferDescriptor->targetHeight();
2030 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2035 RegisterInputSlots(graph, layerIndex, layer);
2036 RegisterOutputSlots(graph, layerIndex, layer);
2039 void Deserializer::ParseResizeBilinear(
GraphPtr graph,
unsigned int layerIndex)
2049 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2052 descriptor.
m_TargetWidth = flatBufferDescriptor->targetWidth();
2053 descriptor.
m_TargetHeight = flatBufferDescriptor->targetHeight();
2058 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2063 RegisterInputSlots(graph, layerIndex, layer);
2064 RegisterOutputSlots(graph, layerIndex, layer);
2067 void Deserializer::ParseSoftmax(
GraphPtr graph,
unsigned int layerIndex)
2078 descriptor.
m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
2081 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2086 RegisterInputSlots(graph, layerIndex, layer);
2087 RegisterOutputSlots(graph, layerIndex, layer);
2090 void Deserializer::ParseSpaceToBatchNd(
GraphPtr graph,
unsigned int layerIndex)
2100 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2101 auto flatBufferPadList = flatBufferDescriptor->padList();
2102 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2104 if (flatBufferPadList->Length() % 2 != 0)
2107 boost::format(
"The size of the pad list must be divisible by 2 %1%") %
CHECK_LOCATION().AsString()));
2110 std::vector<std::pair<unsigned int, unsigned int>> padList;
2111 padList.reserve(flatBufferPadList->Length() / 2);
2112 for (
unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2114 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2120 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2124 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2129 RegisterInputSlots(graph, layerIndex, layer);
2130 RegisterOutputSlots(graph, layerIndex, layer);
2133 void Deserializer::ParseSpaceToDepth(
GraphPtr graph,
unsigned int layerIndex)
2143 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2146 descriptor.
m_BlockSize = flatBufferDescriptor->blockSize();
2150 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2155 RegisterInputSlots(graph, layerIndex, layer);
2156 RegisterOutputSlots(graph, layerIndex, layer);
2161 unsigned int layerIndex)
2166 switch (normalizationDescriptor->normChannelType())
2168 case NormalizationAlgorithmChannel_Across:
2173 case NormalizationAlgorithmChannel_Within:
2180 BOOST_ASSERT_MSG(
false,
"Unsupported normalization channel type");
2184 switch (normalizationDescriptor->normMethodType())
2186 case NormalizationAlgorithmMethod_LocalBrightness:
2191 case NormalizationAlgorithmMethod_LocalContrast:
2198 BOOST_ASSERT_MSG(
false,
"Unsupported normalization method type");
2202 switch (normalizationDescriptor->dataLayout())
2204 case DataLayout_NCHW:
2209 case DataLayout_NHWC:
2216 BOOST_ASSERT_MSG(
false,
"Unsupported data layout");
2220 desc.
m_Alpha = normalizationDescriptor->alpha();
2221 desc.
m_Beta = normalizationDescriptor->beta();
2222 desc.
m_K = normalizationDescriptor->k();
2223 desc.
m_NormSize = normalizationDescriptor->normSize();
2228 void Deserializer::ParseNormalization(
GraphPtr graph,
unsigned int layerIndex)
2232 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2245 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2248 RegisterInputSlots(graph, layerIndex, layer);
2249 RegisterOutputSlots(graph, layerIndex, layer);
2252 void Deserializer::ParseRsqrt(
GraphPtr graph,
unsigned int layerIndex)
2255 auto inputs =
GetInputs(graph, layerIndex);
2259 auto outputs =
GetOutputs(graph, layerIndex);
2265 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
2269 RegisterInputSlots(graph, layerIndex, layer);
2270 RegisterOutputSlots(graph, layerIndex, layer);
2273 void Deserializer::ParseSlice(
GraphPtr graph,
unsigned int layerIndex)
2277 auto inputs =
GetInputs(graph, layerIndex);
2280 auto outputs =
GetOutputs(graph, layerIndex);
2283 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2285 auto fbBegin = fbDescriptor->begin();
2286 auto fbSize = fbDescriptor->size();
2288 if (fbBegin->Length() != fbSize->Length())
2291 boost::format(
"Begin and size descriptors must have the same length %1%") %
CHECK_LOCATION().AsString()));
2295 descriptor.
m_Begin.insert(descriptor.
m_Begin.end(), fbBegin->begin(), fbBegin->end());
2296 descriptor.
m_Size.insert(descriptor.
m_Size.end(), fbSize->begin(), fbSize->end());
2299 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2304 RegisterInputSlots(graph, layerIndex, layer);
2305 RegisterOutputSlots(graph, layerIndex, layer);
2308 void Deserializer::ParseStridedSlice(
GraphPtr graph,
unsigned int layerIndex)
2318 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2320 auto flatBufferBegin = flatBufferDescriptor->begin();
2321 auto flatBufferEnd = flatBufferDescriptor->end();
2322 auto flatBufferStride = flatBufferDescriptor->stride();
2324 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2325 flatBufferBegin->Length() == flatBufferStride->Length()))
2328 boost::format(
"The size of the begin, end, and stride must be equal %1%") %
CHECK_LOCATION().AsString()));
2331 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2332 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2333 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2336 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2337 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2338 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2339 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2340 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2341 descriptor.m_DataLayout =
ToDataLayout(flatBufferDescriptor->dataLayout());
2344 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2349 RegisterInputSlots(graph, layerIndex, layer);
2350 RegisterOutputSlots(graph, layerIndex, layer);
2353 void Deserializer::ParseSubtraction(
GraphPtr graph,
unsigned int layerIndex)
2356 auto inputs =
GetInputs(graph, layerIndex);
2360 auto outputs =
GetOutputs(graph, layerIndex);
2369 RegisterInputSlots(graph, layerIndex, layer);
2370 RegisterOutputSlots(graph, layerIndex, layer);
2373 void Deserializer::ParseGather(
GraphPtr graph,
unsigned int layerIndex)
2389 RegisterInputSlots(graph, layerIndex, layer);
2390 RegisterOutputSlots(graph, layerIndex, layer);
2393 void Deserializer::ParseMean(
GraphPtr graph,
unsigned int layerIndex)
2403 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2404 auto flatBufferAxis = flatBufferDescriptor->axis();
2405 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2408 descriptor.
m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2412 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2417 RegisterInputSlots(graph, layerIndex, layer);
2418 RegisterOutputSlots(graph, layerIndex, layer);
2421 void Deserializer::ParseSplitter(
GraphPtr graph,
unsigned int layerIndex)
2430 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2431 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2432 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2433 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2434 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2435 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2442 for(
unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2444 for (
unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2446 viewsDescriptor.
SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2447 viewsDescriptor.
SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2452 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2455 for(
unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2461 RegisterInputSlots(graph, layerIndex, layer);
2462 RegisterOutputSlots(graph, layerIndex, layer);
2480 void Deserializer::ParseLstm(
GraphPtr graph,
unsigned int layerIndex)
2484 auto inputs =
GetInputs(graph, layerIndex);
2487 auto outputs =
GetOutputs(graph, layerIndex);
2490 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
2492 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2493 auto flatBufferInputParams = flatBufferLayer->inputParams();
2523 if (!lstmDescriptor.m_CifgEnabled)
2525 inputToInputWeights =
ToConstTensor(flatBufferInputParams->inputToInputWeights());
2526 recurrentToInputWeights =
ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2527 cellToInputWeights =
ToConstTensor(flatBufferInputParams->cellToInputWeights());
2528 inputGateBias =
ToConstTensor(flatBufferInputParams->inputGateBias());
2538 if (lstmDescriptor.m_ProjectionEnabled)
2540 projectionWeights =
ToConstTensor(flatBufferInputParams->projectionWeights());
2541 projectionBias =
ToConstTensor(flatBufferInputParams->projectionBias());
2549 if (lstmDescriptor.m_PeepholeEnabled)
2551 cellToForgetWeights =
ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2552 cellToOutputWeights =
ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2562 if (lstmDescriptor.m_LayerNormEnabled)
2564 if (!lstmDescriptor.m_CifgEnabled)
2566 inputLayerNormWeights =
ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2569 forgetLayerNormWeights =
ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2570 cellLayerNormWeights =
ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
2571 outputLayerNormWeights =
ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
2578 IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());
2592 RegisterInputSlots(graph, layerIndex, layer);
2593 RegisterOutputSlots(graph, layerIndex, layer);
2596 void Deserializer::ParseQuantizedLstm(
GraphPtr graph,
unsigned int layerIndex)
2600 auto inputs =
GetInputs(graph, layerIndex);
2603 auto outputs =
GetOutputs(graph, layerIndex);
2606 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
2608 auto flatBufferInputParams = flatBufferLayer->inputParams();
2638 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
2646 RegisterInputSlots(graph, layerIndex, layer);
2647 RegisterOutputSlots(graph, layerIndex, layer);
2650 void Deserializer::ParseDequantize(
GraphPtr graph,
unsigned int layerIndex)
2660 const std::string layerName =
GetLayerName(graph, layerIndex);
2666 RegisterInputSlots(graph, layerIndex, layer);
2667 RegisterOutputSlots(graph, layerIndex, layer);
2670 void Deserializer::ParseMerge(
GraphPtr graph,
unsigned int layerIndex)
2680 const std::string layerName =
GetLayerName(graph, layerIndex);
2686 RegisterInputSlots(graph, layerIndex, layer);
2687 RegisterOutputSlots(graph, layerIndex, layer);
2690 void Deserializer::ParseSwitch(
GraphPtr graph,
unsigned int layerIndex)
2693 auto inputs =
GetInputs(graph, layerIndex);
2697 auto outputs =
GetOutputs(graph, layerIndex);
2709 RegisterInputSlots(graph, layerIndex, layer);
2710 RegisterOutputSlots(graph, layerIndex, layer);
2713 void Deserializer::ParsePrelu(
GraphPtr graph,
unsigned int layerIndex)
2716 auto inputs =
GetInputs(graph, layerIndex);
2720 auto outputs =
GetOutputs(graph, layerIndex);
2729 RegisterInputSlots(graph, layerIndex, layer);
2730 RegisterOutputSlots(graph, layerIndex, layer);
2733 void Deserializer::ParseTranspose(
GraphPtr graph,
unsigned int layerIndex)
2737 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
2739 auto inputs =
GetInputs(graph, layerIndex);
2742 auto outputs =
GetOutputs(graph, layerIndex);
2749 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
2752 RegisterInputSlots(graph, layerIndex, layer);
2753 RegisterOutputSlots(graph, layerIndex, layer);
2756 void Deserializer::ParseTransposeConvolution2d(
GraphPtr graph,
unsigned int layerIndex)
2760 auto inputs =
GetInputs(graph, layerIndex);
2763 auto outputs =
GetOutputs(graph, layerIndex);
2766 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
2768 auto serializerDescriptor = serializerLayer->descriptor();
2771 descriptor.
m_PadLeft = serializerDescriptor->padLeft();
2772 descriptor.
m_PadRight = serializerDescriptor->padRight();
2773 descriptor.
m_PadTop = serializerDescriptor->padTop();
2774 descriptor.
m_PadBottom = serializerDescriptor->padBottom();
2775 descriptor.
m_StrideX = serializerDescriptor->strideX();
2776 descriptor.
m_StrideY = serializerDescriptor->strideY();;
2777 descriptor.
m_BiasEnabled = serializerDescriptor->biasEnabled();;
2786 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
2789 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
2795 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2797 RegisterInputSlots(graph, layerIndex, layer);
2798 RegisterOutputSlots(graph, layerIndex, layer);
2801 void Deserializer::ParseStack(
GraphPtr graph,
unsigned int layerIndex)
2804 auto inputs =
GetInputs(graph, layerIndex);
2806 auto outputs =
GetOutputs(graph, layerIndex);
2809 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
2810 unsigned int axis = flatBufferDescriptor->axis();
2811 unsigned int numInputs = flatBufferDescriptor->numInputs();
2814 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
2815 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
2816 flatBufferInputShape->begin() + flatBufferInputShape->size());
2818 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
2821 for (
unsigned int i=0; i<inputs.size(); ++i)
2824 if (descriptor.m_InputShape != inputShape)
2826 std::stringstream ss;
2827 ss <<
"Shape of input " 2831 <<
" does not equal defined input shape " 2832 << descriptor.m_InputShape
2840 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
2845 RegisterInputSlots(graph, layerIndex, layer);
2846 RegisterOutputSlots(graph, layerIndex, layer);
2849 void Deserializer::ParseStandIn(
GraphPtr graph,
unsigned int layerIndex)
2853 auto inputs =
GetInputs(graph, layerIndex);
2854 auto outputs =
GetOutputs(graph, layerIndex);
2856 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
2857 auto fbDescriptor = fbLayer->descriptor();
2860 descriptor.
m_NumInputs = fbDescriptor->numInputs();
2866 const std::string layerName =
GetLayerName(graph, layerIndex);
2869 for (
unsigned int i = 0u; i < descriptor.
m_NumOutputs; ++i)
2875 RegisterInputSlots(graph, layerIndex, layer);
2876 RegisterOutputSlots(graph, layerIndex, layer);
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
static armnn::LstmDescriptor GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
float m_Eps
Used to avoid dividing by zero.
virtual unsigned int GetNumOutputSlots() const =0
Returns the number of connectable output slots.
bool m_ProjectionEnabled
Enable/disable the projection layer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
UnaryOperation m_Operation
Specifies the elementwiseUnary operation to execute.
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
float m_ScaleW
Center size encoding scale weight.
#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
static GraphPtr LoadGraphFromBinary(const uint8_t *binaryContent, size_t len)
virtual unsigned int GetNumInputSlots() const =0
Returns the number of connectable input slots.
float m_K
Kappa value used for the across channel normalization equation.
A TransposeConvolution2dDescriptor for the TransposeConvolution2dLayer.
const TensorShape & GetShape() const
uint32_t m_PadBottom
Padding bottom value in the height dimension.
uint32_t m_PadLeft
Padding left value in the width dimension.
float m_ClippingThresProj
Clipping threshold value for the projection.
std::string AsString() const
static armnn::NormalizationDescriptor GetNormalizationDescriptor(NormalizationDescriptorPtr normalizationDescriptor, unsigned int layerIndex)
A ReshapeDescriptor for the ReshapeLayer.
static void Destroy(IDeserializer *parser)
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
A ComparisonDescriptor for the ComparisonLayer.
float m_ScaleX
Center size encoding scale x.
bool m_TransposeWeightMatrix
Enable/disable transpose weight matrix.
uint32_t m_PoolWidth
Pooling width value.
#define CHECK_TENSOR_PTR(TENSOR_PTR)
A Convolution2dDescriptor for the Convolution2dLayer.
float m_Alpha
Alpha value for the normalization equation.
const armnnSerializer::TensorInfo * TensorRawPtr
const armnnSerializer::NormalizationDescriptor * NormalizationDescriptorPtr
uint32_t m_PadLeft
Padding left value in the width dimension.
bool m_BiasEnabled
Enable/disable bias.
ResizeMethod m_Method
The Interpolation method to use (Bilinear, NearestNeighbor).
float m_Gamma
Gamma, the scale scalar value applied for the normalized tensor. Defaults to 1.0. ...
float m_Beta
Exponentiation value.
std::vector< unsigned int > m_Size
Size of the slice in each dimension.
The padding fields don't count and are ignored.
float m_Eps
Value to add to the variance. Used to avoid dividing by zero.
PaddingMethod m_PaddingMethod
The padding method to be used. (Exclude, IgnoreValue).
ArgMinMaxFunction m_Function
Specify if the function is to find Min or Max.
uint32_t m_DetectionsPerClass
Detections per classes, used in Regular NMS.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
void CheckLayers(Graph &graph)
static IDeserializerPtr Create()
uint32_t m_PadTop
Padding top value in the height dimension.
uint32_t m_PadRight
Padding right value in the width dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
Copyright (c) 2020 ARM Limited.
void IgnoreUnused(Ts &&...)
armnn::INetworkPtr CreateNetworkFromBinary(const std::vector< uint8_t > &binaryContent) override
Create an input network from binary file contents.
uint32_t m_PadBottom
Padding bottom value in the height dimension.
#define CHECK_GRAPH(GRAPH, LAYERS_INDEX)
static std::string GetLayerName(const GraphPtr &graph, unsigned int index)
uint32_t m_DilationY
Dilation along y axis.
A SpaceToDepthDescriptor for the SpaceToDepthLayer.
std::vector< std::pair< unsigned int, unsigned int > > m_PadList
Specifies the padding values for the input dimension: heightPad{top, bottom} widthPad{left, right}.
uint32_t m_DilationY
Dilation factor value for height dimension.
A BatchToSpaceNdDescriptor for the BatchToSpaceNdLayer.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
BindingPointInfo GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const override
Retrieve binding info (layer id and tensor info) for the network input identified by the given layer ...
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
uint32_t m_NumOutputs
Number of output tensors.
NormalizationAlgorithmMethod m_NormMethodType
Normalization method algorithm to use (LocalBrightness, LocalContrast).
void SetShape(const TensorShape &newShape)
A ResizeDescriptor for the ResizeLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_MaxClassesPerDetection
Maximum numbers of classes per detection, used in Fast NMS.
std::vector< unsigned int > m_Axis
Values for the dimensions to reduce.
A StackDescriptor for the StackLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
TensorShape m_TargetShape
Target shape value.
uint32_t m_PoolHeight
Pooling height value.
uint32_t m_PadTop
Padding top value in the height dimension.
uint32_t m_MaxDetections
Maximum numbers of detections.
A PadDescriptor for the PadLayer.
static int32_t GetBindingLayerInfo(const GraphPtr &graphPtr, unsigned int layerIndex)
const armnnSerializer::Pooling2dDescriptor * PoolingDescriptor
#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE)
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
bool CheckShape(const armnn::TensorShape &actual, const std::vector< uint32_t > &expected)
float m_NmsIouThreshold
Intersection over union threshold.
An LstmDescriptor for the LstmLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
uint32_t m_DilationX
Dilation factor value for width dimension.
uint32_t m_PadTop
Padding top value in the height dimension.
std::string FileLine() const
Status SetViewSize(uint32_t view, uint32_t coord, uint32_t value)
Set the size of the views.
std::vector< unsigned int > m_Begin
Beginning indices of the slice in each dimension.
bool m_KeepDims
Enable/disable keep dimensions. If true, then the reduced dimensions that are of length 1 are kept...
armnnSerializer::TensorInfo * TensorRawPtr
std::vector< unsigned int > m_BlockShape
Block shape values.
float m_Eps
Epsilon, small scalar value added to variance to avoid dividing by zero. Defaults to 1e-12f...
An output connection slot for a layer.
A L2NormalizationDescriptor for the L2NormalizationLayer.
An ArgMinMaxDescriptor for ArgMinMaxLayer.
An OriginsDescriptor for the ConcatLayer.
static LayerBaseRawPtr GetBaseLayer(const GraphPtr &graphPtr, unsigned int layerIndex)
A FullyConnectedDescriptor for the FullyConnectedLayer.
bool m_BiasEnabled
Enable/disable bias.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
uint32_t m_TargetWidth
Target width value.
#define CHECK_VALID_SIZE(ACTUAL,...)
bool m_PeepholeEnabled
Enable/disable peephole.
uint32_t m_NumClasses
Number of classes.
#define CHECKED_NON_NEGATIVE(VALUE)
std::unique_ptr< IDeserializer, void(*)(IDeserializer *parser)> IDeserializerPtr
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
uint32_t m_PadTop
Padding top value in the height dimension.
armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
A StandInDescriptor for the StandIn layer.
#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR)
std::enable_if_t< std::is_unsigned< Source >::value &&std::is_unsigned< Dest >::value, Dest > numeric_cast(Source source)
bool m_UseRegularNms
Use Regular NMS.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
BindingPointInfo GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const override
Retrieve binding info (layer id and tensor info) for the network output identified by the given layer...
std::vector< unsigned int > m_BlockShape
Block shape value.
An ActivationDescriptor for the ActivationLayer.
min(a, max(b, input)) ReLu1 & ReLu6.
std::vector< TensorRawPtr > TensorRawPtrVector
uint32_t m_TargetHeight
Target height value.
uint32_t m_ActivationFunc
The activation function to use.
A SliceDescriptor for the SliceLayer.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
float m_ClippingThresCell
Clipping threshold value for the cell state.
unsigned int m_BlockSize
Scalar specifying the input block size. It must be >= 1.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
static TensorRawPtrVector GetOutputs(const GraphPtr &graph, unsigned int layerIndex)
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
float m_Beta
Beta, the offset scalar value applied for the normalized tensor. Defaults to 1.0. ...
armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
float m_ScaleH
Center size encoding scale height.
ComparisonOperation m_Operation
Specifies the comparison operation to execute.
A SpaceToBatchNdDescriptor for the SpaceToBatchNdLayer.
NormalizationAlgorithmChannel m_NormChannelType
Normalization channel algorithm to use (Across, Within).
float m_A
Alpha upper bound value used by the activation functions. (BoundedReLu, Linear, TanH).
uint32_t m_DilationX
Dilation along x axis.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
bool m_CifgEnabled
Enable/disable cifg (coupled input & forget gate).
armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
uint32_t m_PadLeft
Padding left value in the width dimension.
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
const armnnSerializer::SerializedGraph * GraphPtr
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
A ElementwiseUnaryDescriptor for the ElementwiseUnaryLayer.
PoolingAlgorithm m_PoolType
The pooling algorithm to use (Max, Average, L2).
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
static IDeserializer * CreateRaw()
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
The padding fields count, but are ignored.
std::vector< std::pair< unsigned int, unsigned int > > m_Crops
The values to crop from the input dimension.
Base class for all ArmNN exceptions so that users can filter to just those.
const armnnSerializer::ConstTensor * ConstTensorRawPtr
unsigned int GetNumDimensions() const
Jarret 2009: Local Contrast Normalization.
const armnnSerializer::LstmDescriptor * LstmDescriptorPtr
OutputShapeRounding m_OutputShapeRounding
The rounding method for the output shape. (Floor, Ceiling).
uint32_t m_NumInputs
Number of input tensors.
static armnn::Pooling2dDescriptor GetPoolingDescriptor(PoolingDescriptor pooling2dDescriptor, unsigned int layerIndex)
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
A MeanDescriptor for the MeanLayer.
static TensorRawPtrVector GetInputs(const GraphPtr &graph, unsigned int layerIndex)
bool m_LayerNormEnabled
Enable/disable layer normalization.
uint32_t m_PadRight
Padding right value in the width dimension.
A TransposeDescriptor for the TransposeLayer.
A StridedSliceDescriptor for the StridedSliceLayer.
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
int m_Axis
Axis to reduce across the input tensor.
float m_ScaleY
Center size encoding scale y.
float m_NmsScoreThreshold
NMS score threshold.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
virtual int Connect(IInputSlot &destination)=0
Krichevsky 2012: Local Brightness Normalization.
A Pooling2dDescriptor for the Pooling2dLayer.
const armnnSerializer::LayerBase * LayerBaseRawPtr
A NormalizationDescriptor for the NormalizationLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
An InstanceNormalizationDescriptor for InstanceNormalizationLayer.
static armnn::TensorInfo OutputShapeOfReshape(const armnn::TensorInfo &inputTensorInfo, const std::vector< uint32_t > &targetDimsIn)
float m_B
Beta lower bound value used by the activation functions. (BoundedReLu, Linear, TanH).
A SoftmaxDescriptor for the SoftmaxLayer.
float m_Beta
Beta value for the normalization equation.
const armnnSerializer::OriginsDescriptor * GetOriginsDescriptor(const armnnSerializer::SerializedGraph *graph, unsigned int layerIndex)
uint32_t m_NormSize
Depth radius value.
Status SetViewOriginCoord(uint32_t view, uint32_t coord, uint32_t value)
Set the view origin coordinates.
ActivationFunction m_Function
The activation function to use (Sigmoid, TanH, Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square).
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
A DepthwiseConvolution2dDescriptor for the DepthwiseConvolution2dLayer.
A BatchNormalizationDescriptor for the BatchNormalizationLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
unsigned int GetNumElements() const
A PermuteDescriptor for the PermuteLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
std::vector< float > anchors({ 0.5f, 0.5f, 1.0f, 1.0f, 0.5f, 0.5f, 1.0f, 1.0f, 0.5f, 0.5f, 1.0f, 1.0f, 0.5f, 10.5f, 1.0f, 1.0f, 0.5f, 10.5f, 1.0f, 1.0f, 0.5f, 100.5f, 1.0f, 1.0f })