24 #include <fmt/format.h> 32 using namespace armnn;
38 IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}
40 IDeserializer::~IDeserializer() =
default;
57 armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(
const std::vector<uint8_t> &binaryContent)
64 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
67 BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(
unsigned int layerId,
const std::string &name)
const 69 return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
72 BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(
unsigned int layerId,
const std::string &name)
const 74 return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
// Sentinel layer id for slots that do not belong to a real (serialized) layer.
// constexpr (rather than plain const) so it is usable in constant expressions;
// the value is unchanged: std::numeric_limits<uint32_t>::max() == 0xFFFFFFFF.
constexpr uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
82 void CheckGraph(
const GraphPtr& graph,
83 unsigned int layersIndex,
86 if (graph->layers() ==
nullptr)
88 throw ParseException(fmt::format(
"{0} was called with invalid (null) graph. " 89 "Possible reason is that the graph is not yet loaded and Unpack(ed). " 95 else if (layersIndex >= graph->layers()->size())
97 throw ParseException(fmt::format(
"{0} was called with an invalid layers index. layers:{1} at {2}",
105 unsigned int layersIndex,
106 unsigned int layerIndex,
109 if (graph->layers() ==
nullptr)
111 throw ParseException(fmt::format(
"{0} was called with invalid (null) graph. " 112 "Possible reason is that the graph is not yet loaded and Unpack(ed). " 118 else if (layersIndex >= graph->layers()->size())
120 throw ParseException(fmt::format(
"{0} was called with an invalid layers index. " 126 else if (layerIndex >= graph->layers()[layersIndex].size()
127 && layerIndex != VIRTUAL_LAYER_ID)
129 throw ParseException(fmt::format(
"{0} was called with an invalid layer index. " 130 "layers:{1} layer:{2} at {3}",
141 if (rawPtr ==
nullptr)
143 throw ParseException(fmt::format(
"{0} was called with a null tensor pointer. at {1}",
152 if (rawPtr ==
nullptr)
154 throw ParseException(fmt::format(
"{0} was called with a null const tensor pointer. at {1}",
160 void CheckConstTensorSize(
const unsigned int constTensorSize,
161 const unsigned int tensorSize,
164 if (constTensorSize != tensorSize)
166 throw ParseException(fmt::format(
"{0} wrong number of components supplied to tensor. at:{1}",
172 #define CHECK_TENSOR_PTR(TENSOR_PTR) \ 173 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION()) 175 #define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \ 176 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION()) 178 #define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \ 179 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION()) 181 #define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \ 182 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION()) 184 #define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \ 185 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION()) 191 if (actualSize != expected.size())
196 for (
unsigned int i = 0u; i < actualSize; i++)
198 if (actual[i] != static_cast<unsigned int>(expected[i]))
207 IDeserializer::DeserializerImpl::DeserializerImpl()
208 : m_Network(nullptr, nullptr),
283 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
288 return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
290 return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
292 return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
294 return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
296 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
298 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
300 return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
302 return graphPtr->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->base();
304 return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
306 return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
308 return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
310 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
312 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution3dLayer()->base();
314 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
316 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
318 return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
320 return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
322 return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
324 return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
326 return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
328 return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
330 return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
332 return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
334 return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
336 return graphPtr->layers()->Get(layerIndex)->layer_as_GatherNdLayer()->base();
338 return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
340 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
342 return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
344 return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
346 return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
348 return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
350 return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
352 return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
354 return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
356 return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
358 return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
360 return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
362 return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
364 return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
366 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
368 return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
370 return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
372 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
374 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->base();
376 return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
378 return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
380 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
382 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
384 return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
386 return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
388 return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
390 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
392 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
394 return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
396 return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
398 return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
400 return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
402 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
404 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
406 return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
408 return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
410 return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
412 return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
414 return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
416 return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
418 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
420 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
422 return graphPtr->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer()->base();
425 throw ParseException(fmt::format(
"Layer type {} not recognized", layerType));
433 return layer->layerName()->str();
438 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
442 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
446 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
618 switch (tensorPtr->dataType())
652 throw ParseException(fmt::format(
"Unsupported data type {0} = {1}. {2}",
653 tensorPtr->dataType(),
659 float quantizationScale = tensorPtr->quantizationScale();
660 int32_t quantizationOffset = tensorPtr->quantizationOffset();
662 if (tensorPtr->dimensionality() ==
static_cast<unsigned int>(Dimensionality::Scalar))
669 else if (tensorPtr->dimensionality() ==
static_cast<unsigned int>(Dimensionality::NotSpecified))
678 auto dimensions = tensorPtr->dimensions();
679 unsigned int size = dimensions->size();
680 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
685 if (tensorPtr->dimensionSpecificity() !=
nullptr)
687 auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
688 size = dimensionSpecificity->size();
689 for (
unsigned int i = 0; i < size; ++i)
691 dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
695 TensorShape shape(size, outputDims.data(), dimensionsSpecificity);
697 auto quantizationScales = tensorPtr->quantizationScales();
698 if (quantizationScales)
700 unsigned int quantizationScalesSize = quantizationScales->size();
701 std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
702 unsigned int quantizationDim = tensorPtr->quantizationDim();
725 switch (constTensorPtr->data_type())
729 auto byteData = constTensorPtr->data_as_ByteData()->data();
735 auto shortData = constTensorPtr->data_as_ShortData()->data();
741 auto intData = constTensorPtr->data_as_IntData()->data();
747 auto longData = constTensorPtr->data_as_LongData()->data();
754 throw ParseException(fmt::format(
"Unsupported data type {0} = {1}. {2}",
755 constTensorPtr->data_type(),
766 const auto& numInputs = layer->inputSlots()->size();
770 for (
unsigned int i=0; i<numInputs; ++i)
773 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
774 result[i] =
GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
783 const auto& numOutputs = layer->outputSlots()->size();
787 for (
unsigned int i=0; i<numOutputs; ++i)
789 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
794 void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(
GraphPtr graph,
unsigned int layerIndex)
797 const auto layerName =
GetBaseLayer(graph, layerIndex)->layerName()->c_str();
798 throw ParseException(fmt::format(
"Layer not supported. layerIndex: {0} " 799 "layerName: {1} / {2}",
805 void IDeserializer::DeserializerImpl::ResetParser()
808 m_InputBindings.clear();
809 m_OutputBindings.clear();
817 return CreateNetworkFromGraph(graph);
823 if (binaryContent.fail()) {
827 binaryContent.seekg(0, std::ios::end);
828 const std::streamoff size = binaryContent.tellg();
829 std::vector<char> content(static_cast<size_t>(size));
830 binaryContent.seekg(0);
831 binaryContent.read(content.data(),
static_cast<std::streamsize
>(size));
833 return CreateNetworkFromGraph(graph);
838 if (binaryContent ==
nullptr)
843 flatbuffers::Verifier verifier(binaryContent, len);
844 if (verifier.VerifyBuffer<SerializedGraph>() ==
false)
846 throw ParseException(fmt::format(
"Buffer doesn't conform to the expected Armnn " 847 "flatbuffers format. size:{0} {1}",
856 m_Network = INetwork::Create();
858 unsigned int layerIndex = 0;
859 for (AnyLayer
const* layer : *graph->layers())
865 auto& parserFunction = m_ParserFunctions[layer->layer_type()];
866 (this->*parserFunction)(graph, layerIndex);
871 SetupInputLayers(graph);
872 SetupOutputLayers(graph);
875 for (
auto&& graphIt : m_GraphConnections)
877 Connections& connections = graphIt.second;
878 for (
auto&& outputIt : connections.outputSlots)
880 const unsigned int outputSlotIndex = outputIt.first;
882 if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
884 for (
IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
886 outputSlot->
Connect(*inputSlot);
892 return std::move(m_Network);
896 const std::string& name)
const 899 for (
auto inputBinding : m_InputBindings)
901 if (inputBinding.first == name)
903 return inputBinding.second;
906 throw ParseException(fmt::format(
"No input binding found for layer:{0} / {1}",
912 const std::string& name)
const 915 for (
auto outputBinding : m_OutputBindings)
917 if (outputBinding.first == name)
919 return outputBinding.second;
922 throw ParseException(fmt::format(
"No output binding found for layer:{0} / {1}",
927 unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(
GraphPtr graph,
int targetId)
929 for (
unsigned int i = 0; i < graph->layers()->size(); i++)
931 auto layer = graph->layers()->Get(i);
934 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
935 if (layerBindingId == targetId)
941 throw ParseException(
"Input layer with given layerBindingId not found");
944 unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(
GraphPtr graph,
int targetId)
946 for (
unsigned int i = 0; i < graph->layers()->size(); i++)
948 auto layer = graph->layers()->Get(i);
951 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
952 if (layerBindingId == targetId)
958 throw ParseException(
"Output layer with given layerBindingId not found");
961 unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(
GraphPtr graph,
unsigned int targetIndex)
963 for (
unsigned int i = 0; i < graph->layers()->size(); i++)
966 if (layer->index() == targetIndex)
974 IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(
GraphPtr graph)
976 IDeserializer::DeserializerImpl::FeatureVersions versions;
978 if (graph->featureVersions())
980 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
981 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
982 versions.m_ConstTensorsAsInputs = graph->featureVersions()->constantTensorsAsInputs();
988 void IDeserializer::DeserializerImpl::SetupInputLayers(
GraphPtr graph)
991 const unsigned int numInputs = graph->inputIds()->size();
992 m_InputBindings.clear();
993 m_InputBindings.reserve(numInputs);
995 for (
unsigned int i = 0; i < numInputs; i++)
997 unsigned int inputLayerIndex = 0xFFFFFFFF;
998 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
1000 const unsigned int inputId =
armnn::numeric_cast<
unsigned int>(graph->inputIds()->Get(i));
1001 inputLayerIndex = GetLayerIndexInVector(graph, inputId);
1005 const int inputId = graph->inputIds()->Get(i);
1006 inputLayerIndex = GetInputLayerInVector(graph, inputId);
1016 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
1019 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
1020 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
1023 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
1027 void IDeserializer::DeserializerImpl::SetupOutputLayers(
GraphPtr graph)
1030 const unsigned int numOutputs = graph->outputIds()->size();
1031 m_OutputBindings.clear();
1032 m_OutputBindings.reserve(numOutputs);
1034 for (
unsigned int i = 0; i < numOutputs; i++)
1036 unsigned int outputLayerIndex = 0xFFFFFFFF;
1037 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
1039 const unsigned int outputId =
armnn::numeric_cast<
unsigned int>(graph->outputIds()->Get(i));
1040 outputLayerIndex = GetLayerIndexInVector(graph, outputId);
1044 const int outputId = graph->outputIds()->Get(i);
1045 outputLayerIndex = GetOutputLayerInVector(graph, outputId);
1055 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
1057 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
1058 unsigned int sourceLayerIndex =
1059 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
1060 unsigned int outputSlotIndex =
1061 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
1064 sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
1066 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
1070 void IDeserializer::DeserializerImpl::RegisterOutputSlots(
GraphPtr graph,
1071 uint32_t layerIndex,
1079 throw ParseException(fmt::format(
"The number of outputslots ({0}) does not match the number expected ({1})" 1080 " for layer index: {2} {3}",
1081 baseLayer->outputSlots()->size(),
1089 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1092 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
1096 void IDeserializer::DeserializerImpl::RegisterInputSlots(
GraphPtr graph,
1097 uint32_t layerIndex,
1099 std::vector<unsigned int> ignoreSlots)
1105 if (baseLayer->inputSlots()->size() != (layer->
GetNumInputSlots() - ignoreSlots.size()))
1107 throw ParseException(fmt::format(
"The number of inputslots ({0}) does not match the number expected ({1})" 1108 " for layer index:{2} {3}",
1109 baseLayer->inputSlots()->size(),
1118 if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) == ignoreSlots.end())
1120 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
1121 auto fbConnection = fbInputSlot->connection();
1123 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
1128 void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
1129 uint32_t outputSlotIndex,
1132 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1134 m_GraphConnections[sourceLayerIndex] = Connections();
1137 Connections& connections = m_GraphConnections[sourceLayerIndex];
1138 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1140 connections.inputSlots[outputSlotIndex] = {inputSlot};
1144 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
1148 void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
1149 uint32_t outputSlotIndex,
1152 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1154 m_GraphConnections[sourceLayerIndex] = Connections();
1157 Connections& connections = m_GraphConnections[sourceLayerIndex];
1158 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1163 connections.outputSlots[outputSlotIndex] = outputSlot;
1166 void IDeserializer::DeserializerImpl::ParseAbs(
GraphPtr graph,
unsigned int layerIndex)
1169 auto inputs =
GetInputs(graph, layerIndex);
1173 auto outputs =
GetOutputs(graph, layerIndex);
1179 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1183 RegisterInputSlots(graph, layerIndex, layer);
1184 RegisterOutputSlots(graph, layerIndex, layer);
1187 void IDeserializer::DeserializerImpl::ParseActivation(
GraphPtr graph,
unsigned int layerIndex)
1190 auto inputs =
GetInputs(graph, layerIndex);
1194 auto outputs =
GetOutputs(graph, layerIndex);
1197 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
1199 auto serializerDescriptor = serializerLayer->descriptor();
1203 descriptor.
m_A = serializerDescriptor->a();
1204 descriptor.
m_B = serializerDescriptor->b();
1211 RegisterInputSlots(graph, layerIndex, layer);
1212 RegisterOutputSlots(graph, layerIndex, layer);
1215 void IDeserializer::DeserializerImpl::ParseAdd(
GraphPtr graph,
unsigned int layerIndex)
1218 auto inputs =
GetInputs(graph, layerIndex);
1222 auto outputs =
GetOutputs(graph, layerIndex);
1231 RegisterInputSlots(graph, layerIndex, layer);
1232 RegisterOutputSlots(graph, layerIndex, layer);
1235 void IDeserializer::DeserializerImpl::ParseArgMinMax(
GraphPtr graph,
unsigned int layerIndex)
1238 auto inputs =
GetInputs(graph, layerIndex);
1242 auto outputs =
GetOutputs(graph, layerIndex);
1245 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1246 auto serializerDescriptor = serializerLayer->descriptor();
1250 descriptor.
m_Axis = serializerDescriptor->axis();
1252 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1257 RegisterInputSlots(graph, layerIndex, layer);
1258 RegisterOutputSlots(graph, layerIndex, layer);
1261 void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(
GraphPtr graph,
unsigned int layerIndex)
1271 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1272 auto flatBufferCrops = flatBufferDescriptor->crops();
1273 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1275 if (flatBufferCrops->Length() % 2 != 0)
1280 std::vector<std::pair<unsigned int, unsigned int>> crops;
1281 crops.reserve(flatBufferCrops->Length() / 2);
1282 for (
unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1284 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1290 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1294 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1299 RegisterInputSlots(graph, layerIndex, layer);
1300 RegisterOutputSlots(graph, layerIndex, layer);
1303 void IDeserializer::DeserializerImpl::ParseBatchNormalization(
GraphPtr graph,
unsigned int layerIndex)
1307 auto inputs =
GetInputs(graph, layerIndex);
1310 auto outputs =
GetOutputs(graph, layerIndex);
1316 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1317 auto serializerDescriptor = serializerLayer->descriptor();
1320 descriptor.
m_Eps = serializerDescriptor->eps();
1334 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1336 RegisterInputSlots(graph, layerIndex, layer);
1337 RegisterOutputSlots(graph, layerIndex, layer);
1340 void IDeserializer::DeserializerImpl::ParseCast(
GraphPtr graph,
unsigned int layerIndex)
1357 RegisterInputSlots(graph, layerIndex, layer);
1358 RegisterOutputSlots(graph, layerIndex, layer);
1361 void IDeserializer::DeserializerImpl::ParseConstant(
GraphPtr graph,
unsigned int layerIndex)
1366 auto outputs =
GetOutputs(graph, layerIndex);
1371 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1372 auto serializerInput = serializerLayer->input();
1381 if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
1387 std::unique_ptr<unsigned char[]> permuteBuffer(
new unsigned char[weightsInfo.
GetNumBytes()]);
1394 auto weightsShape = weightsInfo.GetShape();
1395 weightsInfo.SetShape({1,
1398 weightsShape[2]*weightsShape[3]});
1402 layer = m_Network->AddConstantLayer(weightsPermuted, layerName.c_str());
1404 layer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());
1406 RegisterOutputSlots(graph, layerIndex, layer);
1412 layer = m_Network->AddConstantLayer(input, layerName.c_str());
1418 RegisterOutputSlots(graph, layerIndex, layer);
1421 void IDeserializer::DeserializerImpl::ParseConvolution2d(
GraphPtr graph,
unsigned int layerIndex)
1424 auto inputs =
GetInputs(graph, layerIndex);
1427 auto outputs =
GetOutputs(graph, layerIndex);
1430 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
1433 auto flatbufferDescriptor = flatBufferLayer->descriptor();
1436 descriptor.
m_PadLeft = flatbufferDescriptor->padLeft();
1437 descriptor.
m_PadRight = flatbufferDescriptor->padRight();
1438 descriptor.
m_PadTop = flatbufferDescriptor->padTop();
1439 descriptor.
m_PadBottom = flatbufferDescriptor->padBottom();
1440 descriptor.
m_StrideX = flatbufferDescriptor->strideX();
1441 descriptor.
m_StrideY = flatbufferDescriptor->strideY();;
1442 descriptor.
m_DilationX = flatbufferDescriptor->dilationX();
1443 descriptor.
m_DilationY = flatbufferDescriptor->dilationY();;
1444 descriptor.
m_BiasEnabled = flatbufferDescriptor->biasEnabled();;
1448 std::vector<unsigned int> ignoreSlots {};
1453 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
1459 layer = m_Network->AddConvolution2dLayer(descriptor,
1463 auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
1464 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1465 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.
GetInfo());
1466 ignoreSlots.emplace_back(1u);
1471 auto biasLayer = m_Network->AddConstantLayer(biasTensor);
1472 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1473 biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.
GetInfo());
1474 ignoreSlots.emplace_back(2u);
1479 layer = m_Network->AddConvolution2dLayer(descriptor,
1486 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1488 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
1489 RegisterOutputSlots(graph, layerIndex, layer);
1492 void IDeserializer::DeserializerImpl::ParseConvolution3d(
GraphPtr graph,
unsigned int layerIndex)
1495 auto inputs =
GetInputs(graph, layerIndex);
1498 auto outputs =
GetOutputs(graph, layerIndex);
1501 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution3dLayer();
1503 auto serializerDescriptor = serializerLayer->descriptor();
1506 descriptor.
m_PadLeft = serializerDescriptor->padLeft();
1507 descriptor.
m_PadRight = serializerDescriptor->padRight();
1508 descriptor.
m_PadTop = serializerDescriptor->padTop();
1509 descriptor.
m_PadBottom = serializerDescriptor->padBottom();
1510 descriptor.
m_PadFront = serializerDescriptor->padFront();
1511 descriptor.
m_PadBack = serializerDescriptor->padBack();
1512 descriptor.
m_StrideX = serializerDescriptor->strideX();
1513 descriptor.
m_StrideY = serializerDescriptor->strideY();
1514 descriptor.
m_StrideZ = serializerDescriptor->strideZ();
1515 descriptor.
m_DilationX = serializerDescriptor->dilationX();
1516 descriptor.
m_DilationY = serializerDescriptor->dilationY();
1517 descriptor.
m_DilationZ = serializerDescriptor->dilationZ();
1518 descriptor.
m_BiasEnabled = serializerDescriptor->biasEnabled();
1524 IConnectableLayer* layer = m_Network->AddConvolution3dLayer(descriptor, layerName.c_str());
1527 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1529 RegisterInputSlots(graph, layerIndex, layer);
1530 RegisterOutputSlots(graph, layerIndex, layer);
1533 void IDeserializer::DeserializerImpl::ParseDepthToSpace(
GraphPtr graph,
unsigned int layerIndex)
1537 auto inputs =
GetInputs(graph, layerIndex);
1540 auto outputs =
GetOutputs(graph, layerIndex);
1543 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1546 descriptor.
m_BlockSize = fbDescriptor->blockSize();
1550 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1555 RegisterInputSlots(graph, layerIndex, layer);
1556 RegisterOutputSlots(graph, layerIndex, layer);
1559 void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(
GraphPtr graph,
unsigned int layerIndex)
1562 auto inputs =
GetInputs(graph, layerIndex);
1565 auto outputs =
GetOutputs(graph, layerIndex);
1568 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
1570 auto serializerDescriptor = serializerLayer->descriptor();
1573 descriptor.
m_PadLeft = serializerDescriptor->padLeft();
1574 descriptor.
m_PadRight = serializerDescriptor->padRight();
1575 descriptor.
m_PadTop = serializerDescriptor->padTop();
1576 descriptor.
m_PadBottom = serializerDescriptor->padBottom();
1577 descriptor.
m_StrideX = serializerDescriptor->strideX();
1578 descriptor.
m_StrideY = serializerDescriptor->strideY();
1579 descriptor.
m_DilationX = serializerDescriptor->dilationX();
1580 descriptor.
m_DilationY = serializerDescriptor->dilationY();
1581 descriptor.
m_BiasEnabled = serializerDescriptor->biasEnabled();
1585 std::vector<unsigned int> ignoreSlots {};
1589 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
1596 ignoreSlots.emplace_back(1u);
1598 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1605 ignoreSlots.emplace_back(2u);
1607 auto biasLayer = m_Network->AddConstantLayer(biases);
1608 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1609 biasLayer->GetOutputSlot(0).SetTensorInfo(biases.
GetInfo());
1612 if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
1618 std::unique_ptr<unsigned char[]> permuteBuffer(
new unsigned char[weightsInfo.
GetNumBytes()]);
1621 weights.GetMemoryArea(), permuteBuffer.get(),
1625 auto weightsShape = weightsInfo.GetShape();
1626 weightsInfo.SetShape({1,
1629 weightsShape[2]*weightsShape[3]});
1633 auto weightsLayer = m_Network->AddConstantLayer(weightsPermuted);
1634 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1635 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());
1639 auto weightsLayer = m_Network->AddConstantLayer(weights);
1640 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1641 weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
1646 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1653 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1655 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
1656 RegisterOutputSlots(graph, layerIndex, layer);
1659 void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(
GraphPtr graph,
unsigned int layerIndex)
1662 auto inputs =
GetInputs(graph, layerIndex);
1666 auto outputs =
GetOutputs(graph, layerIndex);
1669 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1671 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1679 descriptor.
m_NumClasses = flatBufferDescriptor->numClasses();
1681 descriptor.
m_ScaleX = flatBufferDescriptor->scaleX();
1682 descriptor.
m_ScaleY = flatBufferDescriptor->scaleY();
1683 descriptor.
m_ScaleW = flatBufferDescriptor->scaleW();
1684 descriptor.
m_ScaleH = flatBufferDescriptor->scaleH();
1692 for (
unsigned int i = 0; i < 4; i++)
1694 layer->GetOutputSlot(i).SetTensorInfo(
ToTensorInfo(outputs[i]));
1697 RegisterInputSlots(graph, layerIndex, layer);
1698 RegisterOutputSlots(graph, layerIndex, layer);
1701 void IDeserializer::DeserializerImpl::ParseDivision(
GraphPtr graph,
unsigned int layerIndex)
1704 auto inputs =
GetInputs(graph, layerIndex);
1708 auto outputs =
GetOutputs(graph, layerIndex);
1717 RegisterInputSlots(graph, layerIndex, layer);
1718 RegisterOutputSlots(graph, layerIndex, layer);
1721 void IDeserializer::DeserializerImpl::ParseEqual(
GraphPtr graph,
unsigned int layerIndex)
1724 auto inputs =
GetInputs(graph, layerIndex);
1728 auto outputs =
GetOutputs(graph, layerIndex);
1733 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1738 RegisterInputSlots(graph, layerIndex, layer);
1739 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes Fill and Greater layers and adds them to m_Network.
// NOTE(review): mangled extraction — interior lines are missing; verify against upstream.
1742 void IDeserializer::DeserializerImpl::ParseFill(
GraphPtr graph,
unsigned int layerIndex)
1745 auto inputs =
GetInputs(graph, layerIndex);
1749 auto outputs =
GetOutputs(graph, layerIndex);
// Fill value comes straight from the serialized FillLayer descriptor.
1754 descriptor.
m_Value = graph->layers()->Get(layerIndex)->layer_as_FillLayer()->descriptor()->value();
1755 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1760 RegisterInputSlots(graph, layerIndex, layer);
1761 RegisterOutputSlots(graph, layerIndex, layer);
// Greater is implemented via the generic Comparison layer.
1764 void IDeserializer::DeserializerImpl::ParseGreater(
GraphPtr graph,
unsigned int layerIndex)
1767 auto inputs =
GetInputs(graph, layerIndex);
1771 auto outputs =
GetOutputs(graph, layerIndex);
1776 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1781 RegisterInputSlots(graph, layerIndex, layer);
1782 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes an InstanceNormalization layer and adds it to m_Network.
// NOTE(review): mangled extraction — interior lines (CHECK macros, descriptor declaration,
// data-layout assignment) are missing; verify against upstream.
1785 void IDeserializer::DeserializerImpl::ParseInstanceNormalization(
GraphPtr graph,
unsigned int layerIndex)
1789 auto inputs =
GetInputs(graph, layerIndex);
1792 auto outputs =
GetOutputs(graph, layerIndex);
1795 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1796 auto fbDescriptor = fbLayer->descriptor();
// Copy gamma/beta/epsilon from the serialized descriptor.
1799 descriptor.
m_Gamma = fbDescriptor->gamma();
1800 descriptor.
m_Beta = fbDescriptor->beta();
1801 descriptor.
m_Eps = fbDescriptor->eps();
1804 const std::string layerName =
GetLayerName(graph, layerIndex);
1807 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1810 RegisterInputSlots(graph, layerIndex, layer);
1811 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes an L2Normalization layer and adds it to m_Network.
// NOTE(review): mangled extraction — interior lines missing; verify against upstream.
1814 void IDeserializer::DeserializerImpl::ParseL2Normalization(
GraphPtr graph,
unsigned int layerIndex)
1818 auto inputs =
GetInputs(graph, layerIndex)
// Deserializes a LogicalBinary layer (AND/OR) and adds it to m_Network.
// NOTE(review): mangled extraction — interior lines (CHECK macros, descriptor construction
// from fbDescriptor->operation()) are missing; verify against upstream.
1840 void IDeserializer::DeserializerImpl::ParseLogicalBinary(
GraphPtr graph,
unsigned int layerIndex)
1845 auto inputs =
GetInputs(graph, layerIndex);
1848 auto outputs =
GetOutputs(graph, layerIndex);
1851 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1852 auto fbDescriptor = fbLayer->descriptor();
1857 const std::string& layerName =
GetLayerName(graph, layerIndex);
1858 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1863 RegisterInputSlots(graph, layerIndex, layer);
1864 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a LogSoftmax layer and adds it to m_Network.
// NOTE(review): mangled extraction — interior lines missing; verify against upstream.
1867 void IDeserializer::DeserializerImpl::ParseLogSoftmax(
GraphPtr graph,
unsigned int layerIndex)
// Beta and axis are read directly from the serialized LogSoftmaxLayer descriptor.
1878 descriptor.
m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1879 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1882 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1887 RegisterInputSlots(graph, layerIndex, layer);
1888 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes Minimum and Maximum layers and adds them to m_Network.
// NOTE(review): mangled extraction — the layer-creation lines are missing from both
// functions; verify against upstream.
1891 void IDeserializer::DeserializerImpl::ParseMinimum(
GraphPtr graph,
unsigned int layerIndex)
1894 auto inputs =
GetInputs(graph, layerIndex);
1898 auto outputs =
GetOutputs(graph, layerIndex);
1907 RegisterInputSlots(graph, layerIndex, layer);
1908 RegisterOutputSlots(graph, layerIndex, layer);
1911 void IDeserializer::DeserializerImpl::ParseMaximum(
GraphPtr graph,
unsigned int layerIndex)
1914 auto inputs =
GetInputs(graph, layerIndex);
1918 auto outputs =
GetOutputs(graph, layerIndex);
1927 RegisterInputSlots(graph, layerIndex, layer);
1928 RegisterOutputSlots(graph, layerIndex, layer);
// NOTE(review): the signature line was lost in extraction — presumably the
// GetOriginsDescriptor helper that returns the origins descriptor for either a Concat
// or the deprecated Merger layer, depending on the serialized layer type. TODO confirm.
1932 unsigned int layerIndex)
1934 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1939 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
// Legacy path: Merger layers share the Concat origins-descriptor shape.
1941 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
// Deserializes a ChannelShuffle layer and adds it to m_Network.
// NOTE(review): mangled extraction — interior lines missing; verify against upstream.
1946 void IDeserializer::DeserializerImpl::ParseChannelShuffle(
GraphPtr graph,
unsigned int layerIndex)
// Axis and group count come from the serialized ChannelShuffleLayer descriptor.
1957 descriptor.
m_Axis = graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->axis();
1958 descriptor.m_NumGroups =
1959 graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->numGroups();
1962 IConnectableLayer* layer = m_Network->AddChannelShuffleLayer(descriptor, layerName.c_str());
1967 RegisterInputSlots(graph, layerIndex, layer);
1968 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a generic Comparison layer (Equal/Greater/Less/...) and adds it to m_Network.
// NOTE(review): mangled extraction — the descriptor construction from
// fbDescriptor->operation() is missing; verify against upstream.
1970 void IDeserializer::DeserializerImpl::ParseComparison(
GraphPtr graph,
unsigned int layerIndex)
1975 auto inputs =
GetInputs(graph, layerIndex);
1978 auto outputs =
GetOutputs(graph, layerIndex);
1981 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1982 auto fbDescriptor = fbLayer->descriptor();
1987 const std::string& layerName =
GetLayerName(graph, layerIndex);
1988 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1993 RegisterInputSlots(graph, layerIndex, layer);
1994 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes an ElementwiseUnary layer (Abs/Rsqrt/Exp/...) and adds it to m_Network.
// NOTE(review): mangled extraction — the descriptor construction from
// fbDescriptor->operation() is missing; verify against upstream.
1997 void IDeserializer::DeserializerImpl::ParseElementwiseUnary(
GraphPtr graph,
unsigned int layerIndex)
2002 auto inputs =
GetInputs(graph, layerIndex);
2005 auto outputs =
GetOutputs(graph, layerIndex);
2008 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
2009 auto fbDescriptor = fbLayer->descriptor();
2014 const std::string& layerName =
GetLayerName(graph, layerIndex);
2015 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
2020 RegisterInputSlots(graph, layerIndex, layer);
2021 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Concat layer: rebuilds the OriginsDescriptor (per-view origin coordinates
// plus concat axis) from the serialized form, then adds the layer to m_Network.
// NOTE(review): mangled extraction — the originsDescriptor/descriptor declarations and CHECK
// macros are missing; verify against upstream.
2024 void IDeserializer::DeserializerImpl::ParseConcat(
GraphPtr graph,
unsigned int layerIndex)
2029 auto outputs =
GetOutputs(graph, layerIndex);
2034 unsigned int numViews = originsDescriptor->numViews();
2035 unsigned int numDimensions = originsDescriptor->numDimensions();
2038 auto inputs =
GetInputs(graph, layerIndex);
// Copy every view's origin coordinate for every dimension.
2042 auto originsPtr = originsDescriptor->viewOrigins();
2043 for (
unsigned int v = 0; v < numViews; ++v)
2045 auto originPtr = originsPtr->Get(v);
2046 for (
unsigned int d = 0; d < numDimensions; ++d)
2048 uint32_t value = originPtr->data()->Get(d);
2049 descriptor.SetViewOriginCoord(v, d, value);
2052 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
2054 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
2058 RegisterInputSlots(graph, layerIndex, layer);
2059 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Multiplication layer and adds it to m_Network (no descriptor needed).
// NOTE(review): mangled extraction — interior lines missing; verify against upstream.
2062 void IDeserializer::DeserializerImpl::ParseMultiplication(
GraphPtr graph,
unsigned int layerIndex)
2065 auto inputs =
GetInputs(graph, layerIndex);
2069 auto outputs =
GetOutputs(graph, layerIndex);
2073 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
2078 RegisterInputSlots(graph, layerIndex, layer);
2079 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Floor layer and adds it to m_Network (no descriptor needed).
// NOTE(review): mangled extraction — interior lines missing; verify against upstream.
2082 void IDeserializer::DeserializerImpl::ParseFloor(
GraphPtr graph,
unsigned int layerIndex)
2087 auto inputs =
GetInputs(graph, layerIndex);
2090 auto outputs =
GetOutputs(graph, layerIndex);
2097 layer = m_Network->AddFloorLayer(layerName.c_str());
2102 RegisterInputSlots(graph, layerIndex, layer);
2103 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a FullyConnected layer. Two serialization formats are supported: the legacy
// one where weights/bias are embedded in the layer (m_ConstTensorsAsInputs <= 0), in which
// case standalone Constant layers are created and wired to input slots 1/2, and the newer
// one where weights/bias arrive as real graph inputs.
// NOTE(review): mangled extraction — CHECK macros, weightsTensor/biasTensor reads and the
// biasEnabled guard around the bias block are missing; verify against upstream.
2106 void IDeserializer::DeserializerImpl::ParseFullyConnected(
GraphPtr graph,
unsigned int layerIndex)
2109 auto inputs =
GetInputs(graph, layerIndex);
2112 auto outputs =
GetOutputs(graph, layerIndex);
2115 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
2117 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2120 fullyConnectedDescriptor.
m_BiasEnabled = flatBufferDescriptor->biasEnabled();
2122 fullyConnectedDescriptor.
m_ConstantWeights = flatBufferDescriptor->constantWeights();
// Slots fed by generated Constant layers must not also be wired from serialized connections.
2125 std::vector<unsigned int> ignoreSlots {};
2129 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
2134 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
// Legacy path: materialize embedded weights as a Constant layer on input slot 1.
2138 auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
2139 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
2140 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.
GetInfo());
2141 ignoreSlots.emplace_back(1u);
// Legacy path: embedded bias as a Constant layer on input slot 2.
2146 auto biasLayer = m_Network->AddConstantLayer(biasTensor);
2147 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
2148 biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.
GetInfo());
2149 ignoreSlots.emplace_back(2u);
// New path: weights/bias are genuine inputs of the layer.
2154 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
2156 uint32_t numInputs = fullyConnectedDescriptor.
GetNumInputs();
2163 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
2164 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Pad layer: rebuilds the (before, after) pad pairs from the flat pad list.
// NOTE(review): mangled extraction — CHECK macros, the descriptor construction and the
// format argument of the ParseException are missing; verify against upstream.
2167 void IDeserializer::DeserializerImpl::ParsePad(
GraphPtr graph,
unsigned int layerIndex)
2177 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
2178 auto flatBufferPadList = flatBufferDescriptor->padList();
2179 auto paddingMode = flatBufferDescriptor->paddingMode();
2180 float padValue = flatBufferDescriptor->padValue();
// The serialized pad list is flat [before0, after0, before1, after1, ...] — must be even.
2182 if (flatBufferPadList->Length() % 2 != 0)
2184 throw ParseException(fmt::format(
"The size of the pad list must be divisible by 2 {}",
2188 std::vector<std::pair<unsigned int, unsigned int>> padList;
2189 padList.reserve(flatBufferPadList->Length() / 2);
2190 for (
unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2192 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2198 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
2203 RegisterInputSlots(graph, layerIndex, layer);
2204 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Permute layer using the serialized dimension mappings.
// NOTE(review): mangled extraction — the dimMappings assignment target and the descriptor
// construction are missing; verify against upstream.
2207 void IDeserializer::DeserializerImpl::ParsePermute(
GraphPtr graph,
unsigned int layerIndex)
2212 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
2214 auto inputs =
GetInputs(graph, layerIndex);
2217 auto outputs =
GetOutputs(graph, layerIndex);
2224 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
2227 RegisterInputSlots(graph, layerIndex, layer);
2228 RegisterOutputSlots(graph, layerIndex, layer);
// NOTE(review): two converter helpers whose signature lines were lost in extraction —
// presumably GetPooling2dDescriptor and GetPooling3dDescriptor, translating the serialized
// pooling descriptors into armnn::Pooling2dDescriptor / Pooling3dDescriptor. The switch
// bodies (enum case mappings) and several pad/pool-size assignments are missing; verify
// against upstream before editing.
2232 unsigned int layerIndex)
// Map serialized pool type / rounding / padding method / data layout enums to armnn enums.
2237 switch (pooling2dDesc->poolType())
2260 switch (pooling2dDesc->outputShapeRounding())
2278 switch (pooling2dDesc->paddingMethod())
2296 switch (pooling2dDesc->dataLayout())
2315 desc.
m_PadLeft = pooling2dDesc->padLeft();
2317 desc.
m_PadTop = pooling2dDesc->padTop();
2318 desc.
m_StrideX = pooling2dDesc->strideX();
2319 desc.
m_StrideY = pooling2dDesc->strideY();
// 3D variant of the same conversion.
2327 unsigned int layerIndex)
2332 switch (pooling3dDesc->poolType())
2355 switch (pooling3dDesc->outputShapeRounding())
2373 switch (pooling3dDesc->paddingMethod())
2391 switch (pooling3dDesc->dataLayout())
2410 desc.
m_PadLeft = pooling3dDesc->padLeft();
2412 desc.
m_PadTop = pooling3dDesc->padTop();
2414 desc.
m_PadBack = pooling3dDesc->padBack();
2415 desc.
m_StrideX = pooling3dDesc->strideX();
2416 desc.
m_StrideY = pooling3dDesc->strideY();
2417 desc.
m_StrideZ = pooling3dDesc->strideZ();
// Deserializes Pooling2d and Pooling3d layers using the converter helpers above.
// NOTE(review): mangled extraction — the pooling2dDescriptor/pooling3dDescriptor
// construction lines are missing; verify against upstream.
2425 void IDeserializer::DeserializerImpl::ParsePooling2d(
GraphPtr graph,
unsigned int layerIndex)
2429 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
2430 auto inputs =
GetInputs(graph, layerIndex);
2433 auto outputs =
GetOutputs(graph, layerIndex);
2439 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
2442 RegisterInputSlots(graph, layerIndex, layer);
2443 RegisterOutputSlots(graph, layerIndex, layer);
2446 void IDeserializer::DeserializerImpl::ParsePooling3d(
GraphPtr graph,
unsigned int layerIndex)
2450 auto pooling3dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->descriptor();
2451 auto inputs =
GetInputs(graph, layerIndex);
2454 auto outputs =
GetOutputs(graph, layerIndex);
2460 IConnectableLayer* layer = m_Network->AddPooling3dLayer(pooling3dDescriptor, layerName.c_str());
2463 RegisterInputSlots(graph, layerIndex, layer);
2464 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Quantize layer and adds it to m_Network (no descriptor needed).
// NOTE(review): mangled extraction — layer creation line missing; verify against upstream.
2467 void IDeserializer::DeserializerImpl::ParseQuantize(
GraphPtr graph,
unsigned int layerIndex)
2471 auto inputs =
GetInputs(graph, layerIndex);
2474 auto outputs =
GetOutputs(graph, layerIndex);
2482 RegisterInputSlots(graph, layerIndex, layer);
2483 RegisterOutputSlots(graph, layerIndex, layer);
// NOTE(review): helper whose signature start was lost in extraction — presumably
// OutputShapeOfReshape(const TensorInfo& inputTensorInfo, const std::vector<uint32_t>&
// targetDimsIn): resolves a single -1 "stretch" dimension so the output element count
// matches the input. Verify against upstream.
2487 const std::vector<uint32_t>& targetDimsIn)
2489 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
// -1 marks the dimension to be inferred; at most one is allowed.
2490 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2492 if (stretchDim != targetDimsIn.end())
2494 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2496 throw ParseException(fmt::format(
"At most one component of shape can be -1 {}",
// Product of the explicit dims; the initial -1 cancels the stretch marker's factor.
2500 auto targetNumElements =
2502 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2504 auto stretchIndex =
static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2505 outputDims[stretchIndex] = inputTensorInfo.
GetNumElements() / targetNumElements;
// Deserializes a Rank layer and adds it to m_Network.
// NOTE(review): mangled extraction — body largely missing; verify against upstream.
2516 void IDeserializer::DeserializerImpl::ParseRank(
GraphPtr graph,
unsigned int layerIndex)
2532 RegisterInputSlots(graph, layerIndex, layer);
2533 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Reduce layer (sum/mean/min/max along axes) and adds it to m_Network.
// NOTE(review): mangled extraction — the reduce-operation mapping is missing; verify upstream.
2536 void IDeserializer::DeserializerImpl::ParseReduce(
GraphPtr graph,
unsigned int layerIndex)
2541 auto inputs =
GetInputs(graph, layerIndex);
2544 auto outputs =
GetOutputs(graph, layerIndex);
2547 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2548 auto fbDescriptor = fbLayer->descriptor();
2549 auto flatBufferAxis = fbDescriptor->axis();
2552 descriptor.
m_KeepDims = fbDescriptor->keepDims();
2553 descriptor.
m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2556 const std::string& layerName =
GetLayerName(graph, layerIndex);
2557 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2562 RegisterInputSlots(graph, layerIndex, layer);
2563 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Reshape layer; validates the computed output shape against the
// serialized output dimensions before adding the layer.
// NOTE(review): mangled extraction — reshapeOutputTensorInfo computation, reshapeDesc
// construction and the ParseException thrown from the stringstream are missing; verify
// against upstream.
2566 void IDeserializer::DeserializerImpl::ParseReshape(
GraphPtr graph,
unsigned int layerIndex)
2569 auto inputs =
GetInputs(graph, layerIndex);
2571 auto outputs =
GetOutputs(graph, layerIndex);
2577 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
2578 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2581 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2583 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2584 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
// Shape check only applies when a second (shape) input is present — TODO confirm intent.
2586 if (inputs.size() > 1 && !
CheckShape(reshapeOutputTensorShape, expectedDims))
2588 std::stringstream ss;
2589 ss <<
"New shape defined in reshape parameters " 2590 << reshapeOutputTensorShape
2591 <<
" does not equal output shape " 2592 << actualOutputTensorInfo.
GetShape()
2602 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2605 RegisterInputSlots(graph, layerIndex, layer);
2606 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes Resize and (deprecated) ResizeBilinear layers. Both map onto
// m_Network->AddResizeLayer; ResizeBilinear presumably forces the Bilinear method —
// the method/data-layout assignments are missing from this extraction, TODO confirm.
2609 void IDeserializer::DeserializerImpl::ParseResize(
GraphPtr graph,
unsigned int layerIndex)
2619 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2622 descriptor.
m_TargetWidth = flatBufferDescriptor->targetWidth();
2623 descriptor.
m_TargetHeight = flatBufferDescriptor->targetHeight();
2626 descriptor.
m_AlignCorners = flatBufferDescriptor->alignCorners();
2630 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2635 RegisterInputSlots(graph, layerIndex, layer);
2636 RegisterOutputSlots(graph, layerIndex, layer);
2642 void IDeserializer::DeserializerImpl::ParseResizeBilinear(
GraphPtr graph,
unsigned int layerIndex)
2652 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2655 descriptor.
m_TargetWidth = flatBufferDescriptor->targetWidth();
2656 descriptor.
m_TargetHeight = flatBufferDescriptor->targetHeight();
2659 descriptor.
m_AlignCorners = flatBufferDescriptor->alignCorners();
2663 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2668 RegisterInputSlots(graph, layerIndex, layer);
2669 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes Shape and Softmax layers and adds them to m_Network.
// NOTE(review): mangled extraction — interior lines missing; verify against upstream.
2672 void IDeserializer::DeserializerImpl::ParseShape(
GraphPtr graph,
unsigned int layerIndex)
2688 RegisterInputSlots(graph, layerIndex, layer);
2689 RegisterOutputSlots(graph, layerIndex, layer);
2692 void IDeserializer::DeserializerImpl::ParseSoftmax(
GraphPtr graph,
unsigned int layerIndex)
// Beta and axis come straight from the serialized SoftmaxLayer descriptor.
2703 descriptor.
m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
2704 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->axis();
2707 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2712 RegisterInputSlots(graph, layerIndex, layer);
2713 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes SpaceToBatchNd (flat pad list + block shape) and SpaceToDepth layers.
// NOTE(review): mangled extraction — descriptor declarations, data-layout assignments and
// the format argument of the ParseException are missing; verify against upstream.
2716 void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(
GraphPtr graph,
unsigned int layerIndex)
2726 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2727 auto flatBufferPadList = flatBufferDescriptor->padList();
2728 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
// Pad list is flat [before0, after0, ...] — must be even.
2730 if (flatBufferPadList->Length() % 2 != 0)
2732 throw ParseException(fmt::format(
"The size of the pad list must be divisible by 2 {}",
2736 std::vector<std::pair<unsigned int, unsigned int>> padList;
2737 padList.reserve(flatBufferPadList->Length() / 2);
2738 for (
unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2740 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2746 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2750 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2755 RegisterInputSlots(graph, layerIndex, layer);
2756 RegisterOutputSlots(graph, layerIndex, layer);
2759 void IDeserializer::DeserializerImpl::ParseSpaceToDepth(
GraphPtr graph,
unsigned int layerIndex)
2769 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2772 descriptor.
m_BlockSize = flatBufferDescriptor->blockSize();
2776 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2781 RegisterInputSlots(graph, layerIndex, layer);
2782 RegisterOutputSlots(graph, layerIndex, layer);
// NOTE(review): converter helper whose signature line was lost in extraction — presumably
// GetNormalizationDescriptor, mapping the serialized normalization descriptor to
// armnn::NormalizationDescriptor; the switch-case bodies are missing. Verify upstream.
2787 unsigned int layerIndex)
2792 switch (normalizationDescriptor->normChannelType())
2810 switch (normalizationDescriptor->normMethodType())
2828 switch (normalizationDescriptor->dataLayout())
2846 desc.
m_Alpha = normalizationDescriptor->alpha();
2847 desc.
m_Beta = normalizationDescriptor->beta();
2848 desc.
m_K = normalizationDescriptor->k();
2849 desc.
m_NormSize = normalizationDescriptor->normSize();
// Deserializes a Normalization layer using the converter above.
2854 void IDeserializer::DeserializerImpl::ParseNormalization(
GraphPtr graph,
unsigned int layerIndex)
2858 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2871 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2874 RegisterInputSlots(graph, layerIndex, layer);
2875 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes the deprecated Rsqrt layer via the generic ElementwiseUnary layer.
// NOTE(review): mangled extraction — the descriptor construction (presumably
// UnaryOperation::Rsqrt) is missing; verify against upstream.
2878 void IDeserializer::DeserializerImpl::ParseRsqrt(
GraphPtr graph,
unsigned int layerIndex)
2881 auto inputs =
GetInputs(graph, layerIndex);
2885 auto outputs =
GetOutputs(graph, layerIndex);
2891 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
2895 RegisterInputSlots(graph, layerIndex, layer);
2896 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Slice layer: begin/size vectors must have matching lengths.
// NOTE(review): mangled extraction — descriptor declaration and the format argument of
// the ParseException are missing; verify against upstream.
2899 void IDeserializer::DeserializerImpl::ParseSlice(
GraphPtr graph,
unsigned int layerIndex)
2903 auto inputs =
GetInputs(graph, layerIndex);
2906 auto outputs =
GetOutputs(graph, layerIndex);
2909 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2911 auto fbBegin = fbDescriptor->begin();
2912 auto fbSize = fbDescriptor->size();
2914 if (fbBegin->Length() != fbSize->Length())
2916 throw ParseException(fmt::format(
"Begin and size descriptors must have the same length {}",
// Copy begin/size straight into the descriptor vectors.
2921 descriptor.
m_Begin.insert(descriptor.
m_Begin.end(), fbBegin->begin(), fbBegin->end());
2922 descriptor.
m_Size.insert(descriptor.
m_Size.end(), fbSize->begin(), fbSize->end());
2925 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2930 RegisterInputSlots(graph, layerIndex, layer);
2931 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a StridedSlice layer: begin/end/stride vectors plus the bit masks.
// NOTE(review): mangled extraction — descriptor declaration and the assignments of
// begin/end/stride into it are missing; verify against upstream.
2934 void IDeserializer::DeserializerImpl::ParseStridedSlice(
GraphPtr graph,
unsigned int layerIndex)
2944 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2946 auto flatBufferBegin = flatBufferDescriptor->begin();
2947 auto flatBufferEnd = flatBufferDescriptor->end();
2948 auto flatBufferStride = flatBufferDescriptor->stride();
// All three index vectors must agree in length.
2950 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2951 flatBufferBegin->Length() == flatBufferStride->Length()))
2953 throw ParseException(fmt::format(
"The size of the begin, end, and stride must be equal {}",
2957 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2958 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2959 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
// Masks are copied bit-for-bit from the serialized descriptor.
2962 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2963 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2964 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2965 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2966 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2967 descriptor.m_DataLayout =
ToDataLayout(flatBufferDescriptor->dataLayout());
2970 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2975 RegisterInputSlots(graph, layerIndex, layer);
2976 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes Subtraction, Gather, and GatherNd layers.
// NOTE(review): mangled extraction — layer-creation lines for Subtraction/GatherNd and
// several declarations are missing; verify against upstream.
2979 void IDeserializer::DeserializerImpl::ParseSubtraction(
GraphPtr graph,
unsigned int layerIndex)
2982 auto inputs =
GetInputs(graph, layerIndex);
2986 auto outputs =
GetOutputs(graph, layerIndex);
2995 RegisterInputSlots(graph, layerIndex, layer);
2996 RegisterOutputSlots(graph, layerIndex, layer);
2999 void IDeserializer::DeserializerImpl::ParseGather(
GraphPtr graph,
unsigned int layerIndex)
// Gather axis is read from the serialized GatherLayer descriptor.
3010 descriptor.
m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
3013 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
3018 RegisterInputSlots(graph, layerIndex, layer);
3019 RegisterOutputSlots(graph, layerIndex, layer);
3022 void IDeserializer::DeserializerImpl::ParseGatherNd(
GraphPtr graph,
unsigned int layerIndex)
3038 RegisterInputSlots(graph, layerIndex, layer);
3039 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Mean layer (axes + keepDims) and adds it to m_Network.
// NOTE(review): mangled extraction — the m_KeepDims assignment is missing; verify upstream.
3042 void IDeserializer::DeserializerImpl::ParseMean(
GraphPtr graph,
unsigned int layerIndex)
3052 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
3053 auto flatBufferAxis = flatBufferDescriptor->axis();
3054 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
3057 descriptor.
m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
3061 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
3066 RegisterInputSlots(graph, layerIndex, layer);
3067 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Splitter layer: rebuilds the ViewsDescriptor (per-view sizes and origin
// coordinates) and adds the layer; a second loop over views presumably sets each output
// slot's tensor info — its body is missing from this extraction, TODO confirm.
3070 void IDeserializer::DeserializerImpl::ParseSplitter(
GraphPtr graph,
unsigned int layerIndex)
3079 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
3080 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
3081 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
3082 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
3083 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
3084 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
// Copy each view's size and origin for every dimension.
3091 for(
unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3093 for (
unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
3095 viewsDescriptor.
SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx))
3096 viewsDescriptor.
SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
3101 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
3104 for(
unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3110 RegisterInputSlots(graph, layerIndex, layer);
3111 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes an Lstm layer. Optional weight groups are read only when the corresponding
// descriptor flag enables them: CIFG-disabled adds the input-gate tensors, projection adds
// projection weights/bias, peephole adds cell-to-forget/output weights, and layer-norm adds
// the per-gate normalization weights (input-gate norm only when CIFG is disabled).
// NOTE(review): mangled extraction — lstmDescriptor construction, the mandatory weight
// tensors, lstmInputParams wiring and output-slot tensor infos are missing; verify upstream.
3129 void IDeserializer::DeserializerImpl::ParseLstm(
GraphPtr graph,
unsigned int layerIndex)
3133 auto inputs =
GetInputs(graph, layerIndex);
3136 auto outputs =
GetOutputs(graph, layerIndex);
3139 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
3141 auto flatBufferDescriptor = flatBufferLayer->descriptor();
3142 auto flatBufferInputParams = flatBufferLayer->inputParams();
// Input-gate tensors exist only when CIFG (coupled input-forget gate) is off.
3172 if (!lstmDescriptor.m_CifgEnabled)
3174 inputToInputWeights =
ToConstTensor(flatBufferInputParams->inputToInputWeights());
3175 recurrentToInputWeights =
ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3176 cellToInputWeights =
ToConstTensor(flatBufferInputParams->cellToInputWeights());
3177 inputGateBias =
ToConstTensor(flatBufferInputParams->inputGateBias());
3187 if (lstmDescriptor.m_ProjectionEnabled)
3189 projectionWeights =
ToConstTensor(flatBufferInputParams->projectionWeights());
3190 projectionBias =
ToConstTensor(flatBufferInputParams->projectionBias());
3198 if (lstmDescriptor.m_PeepholeEnabled)
3200 cellToForgetWeights =
ToConstTensor(flatBufferInputParams->cellToForgetWeights());
3201 cellToOutputWeights =
ToConstTensor(flatBufferInputParams->cellToOutputWeights());
3211 if (lstmDescriptor.m_LayerNormEnabled)
3213 if (!lstmDescriptor.m_CifgEnabled)
3215 inputLayerNormWeights =
ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
3218 forgetLayerNormWeights =
ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
3219 cellLayerNormWeights =
ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3220 outputLayerNormWeights =
ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3227 IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());
3241 RegisterInputSlots(graph, layerIndex, layer);
3242 RegisterOutputSlots(graph, layerIndex, layer);
3254 desc.
m_CellClip = qLstmDescriptor->cellClip();
// Deserializes a QLstm (quantized LSTM) layer. Mirrors ParseLstm's optional weight groups,
// with one structural difference: the cell-to-input peephole weight is gated on BOTH
// peephole-enabled AND CIFG-disabled.
// NOTE(review): mangled extraction — qLstmDescriptor construction, mandatory weights,
// qLstmInputParams wiring and output tensor infos are missing; verify upstream.
3268 void IDeserializer::DeserializerImpl::ParseQLstm(
GraphPtr graph,
unsigned int layerIndex)
3272 auto inputs =
GetInputs(graph, layerIndex);
3275 auto outputs =
GetOutputs(graph, layerIndex);
3278 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
3280 auto flatBufferDescriptor = flatBufferLayer->descriptor();
3281 auto flatBufferInputParams = flatBufferLayer->inputParams();
// Input-gate tensors only exist when CIFG is off.
3312 if (!qLstmDescriptor.m_CifgEnabled)
3314 inputToInputWeights =
ToConstTensor(flatBufferInputParams->inputToInputWeights());
3315 recurrentToInputWeights =
ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3316 inputGateBias =
ToConstTensor(flatBufferInputParams->inputGateBias());
3327 if (qLstmDescriptor.m_ProjectionEnabled)
3329 projectionWeights =
ToConstTensor(flatBufferInputParams->projectionWeights());
3330 projectionBias =
ToConstTensor(flatBufferInputParams->projectionBias());
3341 if (qLstmDescriptor.m_PeepholeEnabled)
3343 if (!qLstmDescriptor.m_CifgEnabled)
3345 cellToInputWeights =
ToConstTensor(flatBufferInputParams->cellToInputWeights());
3349 cellToForgetWeights =
ToConstTensor(flatBufferInputParams->cellToForgetWeights());
3350 cellToOutputWeights =
ToConstTensor(flatBufferInputParams->cellToOutputWeights());
3362 if (qLstmDescriptor.m_LayerNormEnabled)
3364 if (!qLstmDescriptor.m_CifgEnabled)
3366 inputLayerNormWeights =
ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
3370 forgetLayerNormWeights =
ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
3371 cellLayerNormWeights =
ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3372 outputLayerNormWeights =
ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3379 IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());
3390 RegisterInputSlots(graph, layerIndex, layer);
3391 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a QuantizedLstm layer (fixed parameter set, no descriptor).
// NOTE(review): mangled extraction — the lstmInputParams population between lines 3406 and
// 3436 is missing; verify against upstream.
3394 void IDeserializer::DeserializerImpl::ParseQuantizedLstm(
GraphPtr graph,
unsigned int layerIndex)
3398 auto inputs =
GetInputs(graph, layerIndex);
3401 auto outputs =
GetOutputs(graph, layerIndex);
3404 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
3406 auto flatBufferInputParams = flatBufferLayer->inputParams();
3436 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
3444 RegisterInputSlots(graph, layerIndex, layer);
3445 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes Dequantize and Merge layers (neither takes a descriptor).
// NOTE(review): mangled extraction — layer-creation lines missing; verify against upstream.
3448 void IDeserializer::DeserializerImpl::ParseDequantize(
GraphPtr graph,
unsigned int layerIndex)
3458 const std::string layerName =
GetLayerName(graph, layerIndex);
3464 RegisterInputSlots(graph, layerIndex, layer);
3465 RegisterOutputSlots(graph, layerIndex, layer);
3468 void IDeserializer::DeserializerImpl::ParseMerge(
GraphPtr graph,
unsigned int layerIndex)
3478 const std::string layerName =
GetLayerName(graph, layerIndex);
3484 RegisterInputSlots(graph, layerIndex, layer);
3485 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes Switch and Prelu layers (neither takes a descriptor).
// NOTE(review): mangled extraction — layer-creation lines missing; verify against upstream.
3488 void IDeserializer::DeserializerImpl::ParseSwitch(
GraphPtr graph,
unsigned int layerIndex)
3491 auto inputs =
GetInputs(graph, layerIndex);
3495 auto outputs =
GetOutputs(graph, layerIndex);
3507 RegisterInputSlots(graph, layerIndex, layer);
3508 RegisterOutputSlots(graph, layerIndex, layer);
3511 void IDeserializer::DeserializerImpl::ParsePrelu(
GraphPtr graph,
unsigned int layerIndex)
3514 auto inputs =
GetInputs(graph, layerIndex);
3518 auto outputs =
GetOutputs(graph, layerIndex);
3527 RegisterInputSlots(graph, layerIndex, layer);
3528 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a Transpose layer using the serialized dimension mappings.
// NOTE(review): mangled extraction — descriptor construction from dimsMapping is missing;
// verify against upstream.
3531 void IDeserializer::DeserializerImpl::ParseTranspose(
GraphPtr graph,
unsigned int layerIndex)
3535 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3537 auto inputs =
GetInputs(graph, layerIndex);
3540 auto outputs =
GetOutputs(graph, layerIndex);
3547 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3550 RegisterInputSlots(graph, layerIndex, layer);
3551 RegisterOutputSlots(graph, layerIndex, layer);
// Deserializes a TransposeConvolution2d layer: copies padding/stride/bias-enabled fields
// from the serialized descriptor, reads weights (and optional biases) and adds the layer.
// NOTE(review): mangled extraction — data-layout assignment, the weights read, the
// biasEnabled guard around optionalBiases and the Add... argument list are missing;
// verify against upstream. The doubled semicolons on strideY/biasEnabled are present
// in the original text — harmless empty statements, but worth cleaning upstream.
3554 void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(
GraphPtr graph,
unsigned int layerIndex)
3558 auto inputs =
GetInputs(graph, layerIndex);
3561 auto outputs =
GetOutputs(graph, layerIndex);
3564 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3566 auto serializerDescriptor = serializerLayer->descriptor();
3569 descriptor.
m_PadLeft = serializerDescriptor->padLeft();
3570 descriptor.
m_PadRight = serializerDescriptor->padRight();
3571 descriptor.
m_PadTop = serializerDescriptor->padTop();
3572 descriptor.
m_PadBottom = serializerDescriptor->padBottom();
3573 descriptor.
m_StrideX = serializerDescriptor->strideX();
3574 descriptor.
m_StrideY = serializerDescriptor->strideY();;
3575 descriptor.
m_BiasEnabled = serializerDescriptor->biasEnabled();;
3584 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3587 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3593 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3595 RegisterInputSlots(graph, layerIndex, layer);
3596 RegisterOutputSlots(graph, layerIndex, layer);
3599 void IDeserializer::DeserializerImpl::ParseStack(
GraphPtr graph,
unsigned int layerIndex)
3602 auto inputs =
GetInputs(graph, layerIndex);
3604 auto outputs =
GetOutputs(graph, layerIndex);
3607 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3608 unsigned int axis = flatBufferDescriptor->axis();
3609 unsigned int numInputs = flatBufferDescriptor->numInputs();
3612 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3613 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3614 flatBufferInputShape->begin() + flatBufferInputShape->size());
3616 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3619 for (
unsigned int i=0; i<inputs.size(); ++i)
3622 if (descriptor.m_InputShape != inputShape)
3624 std::stringstream ss;
3625 ss <<
"Shape of input " 3629 <<
" does not equal defined input shape " 3630 << descriptor.m_InputShape
3638 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3643 RegisterInputSlots(graph, layerIndex, layer);
3644 RegisterOutputSlots(graph, layerIndex, layer);
3647 void IDeserializer::DeserializerImpl::ParseStandIn(
GraphPtr graph,
unsigned int layerIndex)
3651 auto inputs =
GetInputs(graph, layerIndex);
3652 auto outputs =
GetOutputs(graph, layerIndex);
3654 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3655 auto fbDescriptor = fbLayer->descriptor();
3658 descriptor.
m_NumInputs = fbDescriptor->numInputs();
3664 const std::string layerName =
GetLayerName(graph, layerIndex);
3667 for (
unsigned int i = 0u; i < descriptor.
m_NumOutputs; ++i)
3673 RegisterInputSlots(graph, layerIndex, layer);
3674 RegisterOutputSlots(graph, layerIndex, layer);
3694 void IDeserializer::DeserializerImpl::ParseUnidirectionalSequenceLstm(
GraphPtr graph,
unsigned int layerIndex)
3698 auto inputs =
GetInputs(graph, layerIndex);
3701 auto outputs =
GetOutputs(graph, layerIndex);
3704 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer();
3706 auto flatBufferDescriptor = flatBufferLayer->descriptor();
3707 auto flatBufferInputParams = flatBufferLayer->inputParams();
3737 if (!descriptor.m_CifgEnabled)
3739 inputToInputWeights =
ToConstTensor(flatBufferInputParams->inputToInputWeights());
3740 recurrentToInputWeights =
ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3741 inputGateBias =
ToConstTensor(flatBufferInputParams->inputGateBias());
3747 if (descriptor.m_PeepholeEnabled)
3749 cellToInputWeights =
ToConstTensor(flatBufferInputParams->cellToInputWeights());
3756 if (descriptor.m_ProjectionEnabled)
3758 projectionWeights =
ToConstTensor(flatBufferInputParams->projectionWeights());
3759 projectionBias =
ToConstTensor(flatBufferInputParams->projectionBias());
3767 if (descriptor.m_PeepholeEnabled)
3769 cellToForgetWeights =
ToConstTensor(flatBufferInputParams->cellToForgetWeights());
3770 cellToOutputWeights =
ToConstTensor(flatBufferInputParams->cellToOutputWeights());
3780 if (descriptor.m_LayerNormEnabled)
3782 if (!descriptor.m_CifgEnabled)
3784 inputLayerNormWeights =
ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
3787 forgetLayerNormWeights =
ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
3788 cellLayerNormWeights =
ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3789 outputLayerNormWeights =
ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3796 IConnectableLayer* layer = m_Network->AddUnidirectionalSequenceLstmLayer(descriptor,
3809 RegisterInputSlots(graph, layerIndex, layer);
3810 RegisterOutputSlots(graph, layerIndex, layer);
static armnn::NormalizationDescriptor GetNormalizationDescriptor(NormalizationDescriptorPtr normalizationDescriptor, unsigned int layerIndex)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
PoolingAlgorithm m_PoolType
The pooling algorithm to use (Max. Average, L2).
float m_Eps
Used to avoid dividing by zero.
virtual unsigned int GetNumOutputSlots() const =0
Returns the number of connectable output slots.
armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
bool m_ProjectionEnabled
Enable/disable the projection layer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
UnaryOperation m_Operation
Specifies the elementwiseUnary operation to execute.
static TensorRawPtrVector GetOutputs(const GraphPtr &graph, unsigned int layerIndex)
A ViewsDescriptor for the SplitterLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
float m_ScaleW
Center size encoding scale weight.
#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
virtual unsigned int GetNumInputSlots() const =0
Returns the number of connectable input slots.
float m_K
Kappa value used for the across channel normalization equation.
uint32_t GetNumInputs() const
A TransposeConvolution2dDescriptor for the TransposeConvolution2dLayer.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
const TensorShape & GetShape() const
uint32_t m_PoolWidth
Pooling width value.
uint32_t m_PadBottom
Padding bottom value in the height dimension.
uint32_t m_PadLeft
Padding left value in the width dimension.
float m_ClippingThresProj
Clipping threshold value for the projection.
uint32_t m_PoolDepth
Pooling depth value.
std::string AsString() const
static LayerBaseRawPtr GetBaseLayer(const GraphPtr &graphPtr, unsigned int layerIndex)
A ReshapeDescriptor for the ReshapeLayer.
const armnnSerializer::ConstTensor * ConstTensorRawPtr
uint32_t m_PadBack
Padding back value in the depth dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
const armnnSerializer::NormalizationDescriptor * NormalizationDescriptorPtr
A ComparisonDescriptor for the ComparisonLayer.
static GraphPtr LoadGraphFromBinary(const uint8_t *binaryContent, size_t len)
float m_ScaleX
Center size encoding scale x.
bool m_TransposeWeightMatrix
Enable/disable transpose weight matrix.
uint32_t m_PoolWidth
Pooling width value.
bool m_PeepholeEnabled
Enable/disable peephole.
uint32_t GetNumInputs() const
Get the number of views/inputs.
#define CHECK_TENSOR_PTR(TENSOR_PTR)
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
A Convolution2dDescriptor for the Convolution2dLayer.
float m_Alpha
Alpha value for the normalization equation.
uint32_t m_PadLeft
Padding left value in the width dimension.
const armnnSerializer::QLstmDescriptor * QLstmDescriptorPtr
static armnn::UnidirectionalSequenceLstmDescriptor GetUnidirectionalSequenceLstmDescriptor(UnidirectionalSequenceLstmDescriptorPtr descriptor)
bool m_KeepDims
if true then output shape has no change.
float m_HiddenStateScale
Hidden State quantization scale.
const char * EnumNameConstTensorData(ConstTensorData e)
bool m_BiasEnabled
Enable/disable bias.
unsigned int GetNumBytes() const
float m_OutputIntermediateScale
Output intermediate quantization scale.
ResizeMethod m_Method
The Interpolation method to use (Bilinear, NearestNeighbor).
float m_Gamma
Gamma, the scale scalar value applied for the normalized tensor. Defaults to 1.0. ...
float m_Beta
Exponentiation value.
BindingPointInfo GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
Retrieve binding info (layer id and tensor info) for the network input identified by the given layer ...
std::vector< unsigned int > m_Size
Size of the slice in each dimension.
armnn::INetworkPtr CreateNetworkFromBinary(const std::vector< uint8_t > &binaryContent)
Create an input network from binary file contents.
The padding fields don't count and are ignored.
float m_Eps
Value to add to the variance. Used to avoid dividing by zero.
PaddingMethod m_PaddingMethod
The padding method to be used. (Exclude, IgnoreValue).
ArgMinMaxFunction m_Function
Specify if the function is to find Min or Max.
uint32_t m_DetectionsPerClass
Detections per classes, used in Regular NMS.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
void CheckLayers(Graph &graph)
#define ARMNN_LOG(severity)
uint32_t m_PadRight
Padding right value in the width dimension.
const armnnSerializer::SerializedGraph * GetSerializedGraph(const void *buf)
uint32_t m_PadTop
Padding top value in the height dimension.
uint32_t m_PadBottom
Padding bottom value in the height dimension.
bool m_BiasEnabled
Enable/disable bias.
A LogicalBinaryDescriptor for the LogicalBinaryLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
MemoryType GetMemoryArea() const
ReduceOperation m_ReduceOperation
Specifies the reduction operation to execute.
bool m_TimeMajor
Enable/disable time major.
Copyright (c) 2021 ARM Limited and Contributors.
DataLayout m_DataLayout
The data layout to be used (NCDHW, NDHWC).
void IgnoreUnused(Ts &&...)
uint32_t m_PadBottom
Padding bottom value in the height dimension.
uint32_t m_PadFront
Padding front value in the depth dimension.
#define CHECK_GRAPH(GRAPH, LAYERS_INDEX)
uint32_t m_DilationY
Dilation along y axis.
A SpaceToDepthDescriptor for the SpaceToDepthLayer.
const armnnSerializer::SerializedGraph * GraphPtr
std::vector< std::pair< unsigned int, unsigned int > > m_PadList
Specifies the padding values for the input dimension: heightPad{top, bottom} widthPad{left, right}.
uint32_t m_PoolHeight
Pooling height value.
uint32_t m_DilationX
Dilation along x axis.
uint32_t m_DilationY
Dilation factor value for height dimension.
const armnnSerializer::Pooling2dDescriptor * Pooling2dDescriptor
LogicalBinaryOperation m_Operation
Specifies the logical operation to execute.
A BatchToSpaceNdDescriptor for the BatchToSpaceNdLayer.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
static int32_t GetBindingLayerInfo(const GraphPtr &graphPtr, unsigned int layerIndex)
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
uint32_t m_NumOutputs
Number of output tensors.
NormalizationAlgorithmMethod m_NormMethodType
Normalization method algorithm to use (LocalBrightness, LocalContrast).
void SetShape(const TensorShape &newShape)
A ResizeBilinearDescriptor for the ResizeBilinearLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
uint32_t m_MaxClassesPerDetection
Maximum numbers of classes per detection, used in Fast NMS.
const armnnSerializer::LayerBase * LayerBaseRawPtr
std::vector< unsigned int > m_Axis
Values for the dimensions to reduce.
A StackDescriptor for the StackLayer.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
TensorShape m_TargetShape
Target shape value.
uint32_t m_PoolHeight
Pooling height value.
uint32_t m_PadTop
Padding top value in the height dimension.
uint32_t m_MaxDetections
Maximum numbers of detections.
A PadDescriptor for the PadLayer.
std::vector< TensorRawPtr > TensorRawPtrVector
void Permute(const armnn::TensorShape &dstShape, const armnn::PermutationVector &mappings, const void *src, void *dst, size_t dataTypeSize)
#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE)
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
const armnnSerializer::UnidirectionalSequenceLstmDescriptor * UnidirectionalSequenceLstmDescriptorPtr
uint32_t m_PadBack
Padding back value in the depth dimension.
armnn::INetworkPtr CreateNetworkFromBinary(const std::vector< uint8_t > &binaryContent)
Create an input network from binary file contents.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
bool m_LayerNormEnabled
Enable/disable layer normalization.
const armnnSerializer::LstmDescriptor * LstmDescriptorPtr
armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
bool CheckShape(const armnn::TensorShape &actual, const std::vector< uint32_t > &expected)
float m_NmsIouThreshold
Intersection over union threshold.
static armnn::LstmDescriptor GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
An LstmDescriptor for the LstmLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
uint32_t m_DilationX
Dilation factor value for width dimension.
uint32_t m_PadTop
Padding top value in the height dimension.
std::string FileLine() const
Status SetViewSize(uint32_t view, uint32_t coord, uint32_t value)
Set the size of the views.
#define ARMNN_ASSERT_MSG(COND, MSG)
std::vector< unsigned int > m_Begin
Beginning indices of the slice in each dimension.
bool m_KeepDims
Enable/disable keep dimensions. If true, then the reduced dimensions that are of length 1 are kept...
std::vector< unsigned int > m_BlockShape
Block shape values.
float m_Eps
Epsilon, small scalar value added to variance to avoid dividing by zero. Defaults to 1e-12f...
An output connection slot for a layer.
A L2NormalizationDescriptor for the L2NormalizationLayer.
static TensorRawPtrVector GetInputs(const GraphPtr &graph, unsigned int layerIndex)
An ArgMinMaxDescriptor for ArgMinMaxLayer.
An OriginsDescriptor for the ConcatLayer.
A ReduceDescriptor for the REDUCE operators.
float m_ProjectionClip
Clipping threshold value for the projection.
A FullyConnectedDescriptor for the FullyConnectedLayer.
BindingPointInfo GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
Retrieve binding info (layer id and tensor info) for the network output identified by the given layer...
bool m_BiasEnabled
Enable/disable bias.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
float m_InputIntermediateScale
Input intermediate quantization scale.
OutputShapeRounding m_OutputShapeRounding
The rounding method for the output shape. (Floor, Ceiling).
uint32_t m_TargetWidth
Target width value.
A GatherDescriptor for the GatherLayer.
uint32_t m_PadBottom
Padding bottom value in the height dimension.
#define CHECK_VALID_SIZE(ACTUAL,...)
bool m_PeepholeEnabled
Enable/disable peephole.
uint32_t m_NumClasses
Number of classes.
#define CHECKED_NON_NEGATIVE(VALUE)
bool m_HalfPixelCenters
Half Pixel Centers.
std::unique_ptr< IDeserializer, void(*)(IDeserializer *parser)> IDeserializerPtr
armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
uint32_t m_PadTop
Padding top value in the height dimension.
armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
#define ARMNN_ASSERT(COND)
A StandInDescriptor for the StandIn layer.
A QLstmDescriptor for the QLstmLayer.
#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR)
bool m_UseRegularNms
Use Regular NMS.
uint32_t m_PadFront
Padding front value in the depth dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
std::vector< unsigned int > m_BlockShape
Block shape value.
PaddingMode
The padding mode controls whether the padding should be filled with constant values (Constant)...
An ActivationDescriptor for the ActivationLayer.
const TensorInfo & GetInfo() const
min(a, max(b, input)) ReLu1 & ReLu6.
uint32_t m_PadLeft
Padding left value in the width dimension.
uint32_t m_TargetHeight
Target height value.
uint32_t m_ActivationFunc
The activation function to use.
A SliceDescriptor for the SliceLayer.
static armnn::Pooling3dDescriptor GetPooling3dDescriptor(Pooling3dDescriptor pooling3dDescriptor, unsigned int layerIndex)
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
A Convolution3dDescriptor for the Convolution3dLayer.
uint32_t m_PadRight
Padding right value in the width dimension.
float m_ClippingThresCell
Clipping threshold value for the cell state.
unsigned int m_BlockSize
Scalar specifying the input block size. It must be >= 1.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
float m_ForgetIntermediateScale
Forget intermediate quantization scale.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
float m_Beta
Beta, the offset scalar value applied for the normalized tensor. Defaults to 1.0. ...
armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
armnn::PaddingMode ToPaddingMode(armnnSerializer::PaddingMode paddingMode)
A Pooling3dDescriptor for the Pooling3dLayer.
uint32_t m_StrideZ
Stride value when proceeding through input for the depth dimension.
std::vector< uint32_t > m_vAxis
The indices of the dimensions to reduce.
float m_ScaleH
Center size encoding scale height.
static armnn::Pooling2dDescriptor GetPooling2dDescriptor(Pooling2dDescriptor pooling2dDescriptor, unsigned int layerIndex)
ComparisonOperation m_Operation
Specifies the comparison operation to execute.
A SpaceToBatchNdDescriptor for the SpaceToBatchNdLayer.
DataLayout m_DataLayout
The data layout to be used (NDHWC, NCDHW).
NormalizationAlgorithmChannel m_NormChannelType
Normalization channel algorithm to use (Across, Within).
float m_CellClip
Clipping threshold value for the cell state.
float m_A
Alpha upper bound value used by the activation functions. (BoundedReLu, Linear, TanH, Elu).
uint32_t m_DilationX
Dilation along x axis.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
const armnnSerializer::TensorInfo * TensorRawPtr
bool m_CifgEnabled
Enable/disable cifg (coupled input & forget gate).
armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
uint32_t GetNumInputs() const
Get the number of views/inputs.
uint32_t m_PadLeft
Padding left value in the width dimension.
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
bool m_AlignCorners
Aligned corners.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
int32_t m_Axis
The axis in params to gather indices from.
A ElementwiseUnaryDescriptor for the ElementwiseUnaryLayer.
PoolingAlgorithm m_PoolType
The pooling algorithm to use (Max. Average, L2).
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
uint32_t m_PadLeft
Padding left value in the width dimension.
The padding fields count, but are ignored.
std::vector< std::pair< unsigned int, unsigned int > > m_Crops
The values to crop from the input dimension.
Base class for all ArmNN exceptions so that users can filter to just those.
static std::string GetLayerName(const GraphPtr &graph, unsigned int index)
uint32_t m_PadTop
Padding top value in the height dimension.
unsigned int GetNumDimensions() const
Function that returns the tensor rank.
uint32_t m_PadTop
Padding top value in the height dimension.
bool m_ProjectionEnabled
Enable/disable the projection layer.
Jarret 2009: Local Contrast Normalization.
OutputShapeRounding m_OutputShapeRounding
The rounding method for the output shape. (Floor, Ceiling).
uint32_t m_NumInputs
Number of input tensors.
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
A MeanDescriptor for the MeanLayer.
void SetConstant(const bool IsConstant=true)
Marks the data corresponding to this tensor info as constant.
static armnn::QLstmDescriptor GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptorPtr)
static armnn::TensorInfo OutputShapeOfReshape(const armnn::TensorInfo &inputTensorInfo, const std::vector< uint32_t > &targetDimsIn)
bool m_LayerNormEnabled
Enable/disable layer normalization.
std::enable_if_t< std::is_unsigned< Source >::value &&std::is_unsigned< Dest >::value, Dest > numeric_cast(Source source)
armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
uint32_t m_PadRight
Padding right value in the width dimension.
A TransposeDescriptor for the TransposeLayer.
A StridedSliceDescriptor for the StridedSliceLayer.
uint32_t m_Axis
Axis to apply channel shuffle operation on.
uint32_t GetNumInputs() const
Get the number of views/inputs.
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
int m_Axis
Axis to reduce across the input tensor.
float m_ScaleY
Center size encoding scale y.
float m_NmsScoreThreshold
NMS score threshold.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
virtual int Connect(IInputSlot &destination)=0
Krichevsky 2012: Local Brightness Normalization.
const char * EnumNameDataType(DataType e)
A Pooling2dDescriptor for the Pooling2dLayer.
A NormalizationDescriptor for the NormalizationLayer.
uint32_t m_StrideX
Stride value when proceeding through input for the width dimension.
DataLayout m_DataLayout
The data layout to be used (NCHW, NHWC).
An InstanceNormalizationDescriptor for InstanceNormalizationLayer.
PaddingMethod m_PaddingMethod
The padding method to be used. (Exclude, IgnoreValue).
A ChannelShuffleDescriptor for the ChannelShuffle operator.
float m_CellIntermediateScale
Cell intermediate quantization scale.
uint32_t m_DilationZ
Dilation along z axis.
float m_B
Beta lower bound value used by the activation functions. (BoundedReLu, Linear, TanH).
armnn::TensorShape Permuted(const armnn::TensorShape &srcShape, const armnn::PermutationVector &mappings)
A SoftmaxDescriptor for the SoftmaxLayer.
float m_Beta
Beta value for the normalization equation.
uint32_t m_StrideZ
Stride value when proceeding through input for the depth dimension.
const armnnSerializer::OriginsDescriptor * GetOriginsDescriptor(const armnnSerializer::SerializedGraph *graph, unsigned int layerIndex)
bool m_CifgEnabled
Enable/disable CIFG (coupled input & forget gate).
uint32_t m_NormSize
Depth radius value.
Status SetViewOriginCoord(uint32_t view, uint32_t coord, uint32_t value)
Set the view origin coordinates.
ActivationFunction m_Function
The activation function to use (Sigmoid, TanH, Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square, Elu).
uint32_t m_StrideY
Stride value when proceeding through input for the height dimension.
A DepthwiseConvolution2dDescriptor for the DepthwiseConvolution2dLayer.
constexpr unsigned int MaxNumOfTensorDimensions
uint32_t m_DilationY
Dilation along y axis.
A FillDescriptor for the FillLayer.
A BatchNormalizationDescriptor for the BatchNormalizationLayer.
uint32_t m_PadLeft
Padding left value in the width dimension.
unsigned int GetNumElements() const
constexpr unsigned int GetDataTypeSize(DataType dataType)
A PermuteDescriptor for the PermuteLayer.
const armnnSerializer::Pooling3dDescriptor * Pooling3dDescriptor
uint32_t m_PadRight
Padding right value in the width dimension.
int32_t m_HiddenStateZeroPoint
Hidden State zero point.
bool m_ConstantWeights
Enable/disable constant weights and biases.