ArmNN 20.11 - RecordByRecordCaffeParser.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "RecordByRecordCaffeParser.hpp"

#include "armnn/Exceptions.hpp"
#include "armnn/Utils.hpp"
#include <armnn/utility/NumericCast.hpp>

#include "GraphTopologicalSort.hpp"

// Caffe
#include <google/protobuf/wire_format.h>


//#include <stdio.h>
#include <limits.h>
#include <sstream>
//#include <iostream>
#include <fstream>

namespace armnnCaffeParser
{
// class which holds information on the absolute position in the stream
// of the data and the length of the data record.
class VarLenDataInfo
{
public:
    VarLenDataInfo(std::streamoff positionOfData, size_t sizeOfData) :
        m_PositionOfData(positionOfData), m_SizeOfData(sizeOfData) {}

    VarLenDataInfo(const VarLenDataInfo& x) :
        m_PositionOfData(x.PositionOfData()), m_SizeOfData(x.SizeOfData()) {}

    VarLenDataInfo& operator=(const VarLenDataInfo& x)
    {
        // handle self assignment
        if (this == &x) {
            return *this;
        }
        m_PositionOfData = x.PositionOfData(); m_SizeOfData = x.SizeOfData(); return *this;
    }

    std::streamoff PositionOfData() const {return m_PositionOfData;}
    size_t SizeOfData() const {return m_SizeOfData;}

private:
    std::streamoff m_PositionOfData;
    size_t m_SizeOfData;

};

// class which holds enough information about a LayerParameter in the Caffe protobuf
// format to allow in-place layers to be resolved and the layers to be sorted
// topologically before the entire record is parsed into memory.
//
// NOTE: function naming follows that of the protobuf classes these proxies are standing in for
class LayerParameterInfo : public VarLenDataInfo
{
public:
    static const std::string INPUT;
    LayerParameterInfo(const VarLenDataInfo& varLenDataInfo) :
        VarLenDataInfo(varLenDataInfo.PositionOfData(), varLenDataInfo.SizeOfData()),
        m_newTops(false), m_newBottoms(false) {}

    LayerParameterInfo(std::streamoff positionOfData, size_t sizeOfData) :
        VarLenDataInfo(positionOfData, sizeOfData), m_newTops(false), m_newBottoms(false) {}

    LayerParameterInfo(const LayerParameterInfo& x) :
        VarLenDataInfo(x.PositionOfData(), x.SizeOfData()),
        m_name(x.m_name),
        m_type(x.m_type),
        m_tops(x.m_tops),
        m_bottoms(x.m_bottoms),
        m_newTops(x.m_newTops),
        m_newBottoms(x.m_newBottoms) {}

    LayerParameterInfo& operator=(const LayerParameterInfo& x)
    {
        if (this == &x) {
            return *this;
        }
        VarLenDataInfo::operator=(x);
        m_name = x.m_name;
        m_type = x.m_type;
        m_tops = x.m_tops;
        m_bottoms = x.m_bottoms;
        m_newTops = x.m_newTops;
        m_newBottoms = x.m_newBottoms;
        return *this;
    }

    const std::string name() const {return m_name;}
    void set_name(const std::unique_ptr<char[]>& theName, size_t length)
    {
        m_name = std::string(theName.get(), length);
    }
    void set_name(const std::string& theName) {m_name = theName;}

    const std::string type() const {return m_type;}
    void set_type(const std::unique_ptr<char[]>& theType, size_t length)
    {
        m_type = std::string(theType.get(), length);
    }
    void set_type(const std::string& theType) {m_type = theType;}

    void add_top(const std::unique_ptr<char[]>& top, size_t length)
    {
        std::string topName(top.get(), length);
        m_tops.push_back(topName);
    }
    void add_top(const std::string& topName)
    {
        m_tops.push_back(topName);
    }
    const std::string top(unsigned long i) const {return m_tops[i];}
    unsigned long top_size() const {return m_tops.size();}
    void set_top(unsigned long i, const std::string& newName) {m_tops[i] = newName; m_newTops = true;}
    bool new_tops() const {return m_newTops;}

    void add_bottom(const std::unique_ptr<char[]>& bottom, size_t length)
    {
        std::string bottomName(bottom.get(), length);
        m_bottoms.push_back(bottomName);
    }
    unsigned long bottom_size() const {return m_bottoms.size();}
    const std::string bottom(unsigned long i) const {return m_bottoms[i];}
    void set_bottom(unsigned long i, const std::string& newName) {m_bottoms[i] = newName; m_newBottoms = true;}
    bool new_bottoms() const {return m_newBottoms;}

    // if the position and size of the data is zero and the type is "Input" then this is an 'Implicit Input Layer'
    // and needs to be handled differently from ordinary layers.
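    // For example, a prototxt that declares its input at NetParameter level:
    //     input: "data"
    //     input_dim: 1
    //     input_dim: 3
    //     input_dim: 224
    //     input_dim: 224
    // has no explicit "Input" layer; the parser synthesises one, identified by a
    // zero position/size and the type "Input".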
    bool isImplicitInputLayer() const
    {
        return (PositionOfData() == 0) && (SizeOfData() == 0) && (INPUT.compare(type()) == 0);
    }

private:
    std::string m_name;
    std::string m_type;
    std::vector<std::string> m_tops;
    std::vector<std::string> m_bottoms;
    // mark the layers whose topology was changed
    // by the ResolveInPlaceLayers method.
    bool m_newTops;
    bool m_newBottoms;
};

// class which holds the field type (wire type) and field id (id from the .proto schema)
// read from the protobuf messages as per the binary encoding described in
// https://developers.google.com/protocol-buffers/docs/encoding
//
// NOTE: function naming follows that of the protobuf classes these proxies are standing in for
class ProtobufFieldInfo
{
public:
    ProtobufFieldInfo(int field_type, int field_id) :
        m_eof(false), m_field_type(field_type), m_field_id(field_id) {}
    ProtobufFieldInfo() : m_eof(true), m_field_type(0), m_field_id(0) {}

    bool eof() {return m_eof;}
    int field_type() {return m_field_type;}
    int field_id() {return m_field_id;}

private:
    bool m_eof;
    int m_field_type;
    int m_field_id;
};


// There are some NetParameter level data which are required to correctly process
// some Caffe models, specifically those which have 'implicit' input layers.
// It is also nice to have the name of the model.
//
// NOTE: function naming follows that of the protobuf classes these proxies are standing in for
class NetParameterInfo
{
public:
    const std::string name() const {return m_name;}
    void set_name(const std::unique_ptr<char[]>& theName, size_t length)
    {
        m_name = std::string(theName.get(), length);
    }

    void add_input(const std::unique_ptr<char[]>& input, size_t length)
    {
        std::string inputName(input.get(), length);
        m_inputs.push_back(inputName);
    }
    const std::string input(unsigned long i) const {return m_inputs[i];}
    unsigned long input_size() const {return m_inputs.size();}

    void add_input_dimension(int input_dimension) {
        m_input_dimensions.push_back(input_dimension);
    }
    int input_dimension(unsigned long i) const {return m_input_dimensions[i];}
    unsigned long input_dimensions_size() const {return m_input_dimensions.size();}

    void add_blob_shape(caffe::BlobShape shape) {
        m_blob_shapes.push_back(shape);
    }
    const caffe::BlobShape blob_shape(unsigned long i) const {return m_blob_shapes[i];}
    unsigned long blob_shapes_size() const {return m_blob_shapes.size();}

private:
    std::string m_name;
    std::vector<std::string> m_inputs;
    std::vector<int> m_input_dimensions;
    std::vector<caffe::BlobShape> m_blob_shapes;

};

}; // namespace armnnCaffeParser

using namespace armnnCaffeParser;

// Initialise the class const
const std::string LayerParameterInfo::INPUT = "Input";

namespace
{

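// Reads the tag that precedes every protobuf field: the low three bits of the
// first byte are the wire type and the remaining bits are the field id from the
// .proto schema. For example a tag byte of 0x0A decodes to wire type 2
// (length-delimited) and field id 1. If the continuation bit of the first byte
// is set, a second byte carries the higher bits of the field id, which is
// enough for the field ids used by caffe.proto (e.g. layer = 100).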
ProtobufFieldInfo readFieldInfo(std::ifstream& ifs)
{
    unsigned char first_byte = static_cast<unsigned char>(ifs.get());
    if (!ifs.good())
    {
        ProtobufFieldInfo eof;
        return eof;
    }
    int field_type = first_byte&7;
    int field_id = first_byte>>3;
    if ((field_id & 16) == 16)
    {
        unsigned char second_byte = static_cast<unsigned char>(ifs.get());
        if (!ifs.good())
        {
            ProtobufFieldInfo eof;
            return eof;
        }
        field_id = (field_id-16) + ((second_byte&127)<<4);
    }
    ProtobufFieldInfo fieldInfo(field_type, field_id);
    return fieldInfo;
}

const static int MAX_NUM_BYTES = 5;

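// Decodes a base-128 varint: each byte contributes its low seven bits,
// least-significant group first, and a set top bit means another byte follows.
// For example the bytes 0xAC 0x02 decode to 0x2C | (0x02 << 7) = 300.
// MAX_NUM_BYTES is 5 because a 32-bit value needs at most five 7-bit groups.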
int ReadBase128(std::ifstream& ifs)
{
    int result = 0;
    unsigned int shift_by = 0;
    int bytesRead = 0;
    while (true)
    {
        unsigned char a_byte = static_cast<unsigned char>(ifs.get());
        ++bytesRead;
        if (bytesRead > MAX_NUM_BYTES)
        {
            throw armnn::ParseException(
                "ReadBase128 exceeded the maximum number of bytes expected for an integer representation");
        }
        result += (a_byte & 127) << shift_by;
        shift_by += 7;
        if ((a_byte & 128) != 128)
        {
            break;
        }
    }
    return result;
}


std::unique_ptr<char[]> AllocateBuffer(std::ifstream& ifs, VarLenDataInfo& dataInfo)
{
    std::unique_ptr<char[]> ptr(new char[dataInfo.SizeOfData()]);
    ifs.clear();
    ifs.seekg(dataInfo.PositionOfData(), std::ios_base::beg);
    ifs.read(ptr.get(), armnn::numeric_cast<std::streamsize>(dataInfo.SizeOfData()));
    return ptr;
}

VarLenDataInfo CreateVarLenDataInfo(std::streamoff bufferStart, std::streamoff endOfLayer) {
    std::streamoff sizeOfLayer = endOfLayer - bufferStart;
    if (sizeOfLayer < 0)
    {
        std::stringstream ss;
        ss << "error when determining buffer size, negative value [" << sizeOfLayer << "]";
        throw armnn::ParseException(ss.str());
    }
    // NOTE: as some of the data being read in will be translated into strings (names of layers etc.)
    // the maximum size we can deal with is the upper size limit of a string, i.e. size_t.
    // On the platform this was developed on std::streamoff is a signed long int and size_t is an
    // unsigned long int, so this error condition can never fire there, but the code is supposed
    // to be portable so the check remains in place.
    if (armnn::numeric_cast<size_t>(sizeOfLayer) > SIZE_MAX) {
        std::stringstream ss;
        ss << "layer is greater than " << SIZE_MAX << " in size; cannot process. layer size = [" << sizeOfLayer << "]";
        throw armnn::ParseException(ss.str());
    }
    LayerParameterInfo info(bufferStart, armnn::numeric_cast<size_t>(sizeOfLayer));
    return info;
}

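// Scans a single LayerParameter record field by field without deserialising it:
// varint fields (wire type 0) are read and discarded, fixed 64-bit and 32-bit
// fields (wire types 1 and 5) are skipped, and length-delimited fields
// (wire type 2) are only read when they hold the name, type, bottom or top
// strings needed to build the topology; everything else is seeked over.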
void ReadTopologicalInfoForLayerParameter(LayerParameterInfo& layerInfo, std::ifstream& ifs)
{
    // position the file pointer to the start of the layer data
    ifs.clear();
    ifs.seekg(layerInfo.PositionOfData(), std::ios_base::beg);
    std::streamoff endOfLayer = layerInfo.PositionOfData() +
        armnn::numeric_cast<std::streamoff>(layerInfo.SizeOfData());
    while(true)
    {
        // check to see if we have reached the end of the record
        std::streamoff currentPosition = ifs.tellg();
        if (currentPosition >= endOfLayer) {
            return;
        }
        // read the information for the next field.
        ProtobufFieldInfo fieldInfo = readFieldInfo(ifs);
        if (fieldInfo.eof())
        {
            return;
            // TODO: figure out whether this is an error condition or not...
            //throw armnn::ParseException("failed to read field from LayerParameter data");
        }
        // process the field
        switch (fieldInfo.field_type())
        {
            case 0:
            {
                ReadBase128(ifs);
                break;
            }
            case 2:
            {
                int size = ReadBase128(ifs);
                std::streamoff posStartOfData = ifs.tellg();
                VarLenDataInfo dataInfo(posStartOfData, armnn::numeric_cast<size_t>(size));
                //optional string name = 1; // the layer name
                //optional string type = 2; // the layer type
                //repeated string bottom = 3; // the name of each bottom blob
                //repeated string top = 4; // the name of each top blob
                if (fieldInfo.field_id() == 1)
                {
                    // read and set the name of the layer
                    auto layerName = AllocateBuffer(ifs, dataInfo);
                    layerInfo.set_name(layerName, dataInfo.SizeOfData());
                }
                else if (fieldInfo.field_id() == 2)
                {
                    // read and set the type of the layer
                    auto layerType = AllocateBuffer(ifs, dataInfo);
                    layerInfo.set_type(layerType, dataInfo.SizeOfData());
                }
                else if (fieldInfo.field_id() == 3)
                {
                    // read and add a bottom to the layer
                    auto bottom = AllocateBuffer(ifs, dataInfo);
                    layerInfo.add_bottom(bottom, dataInfo.SizeOfData());
                }
                else if (fieldInfo.field_id() == 4)
                {
                    // read and add a top to the layer
                    auto top = AllocateBuffer(ifs, dataInfo);
                    layerInfo.add_top(top, dataInfo.SizeOfData());
                }
                else
                {
                    ifs.seekg(size, std::ios_base::cur);
                    if (!ifs.good())
                    {
                        // TODO: error out?
                        return;
                    }
                }
                break;
            }
            case 1:
            {
                // 64 bit
                // advance by eight bytes
                ifs.seekg(8, std::ios_base::cur);
                if (!ifs.good())
                {
                    // TODO: error out?
                    return;
                }
                break;
            }
            case 5:
            {
                // 32 bit
                // advance by four bytes
                ifs.seekg(4, std::ios_base::cur);
                if (!ifs.good())
                {
                    // TODO: error out?
                    return;
                }
                break;
            }
            default:
            {
                throw armnn::ParseException("Encountered an unknown field type");
                break;
            }
        }
    }
}

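// Caffe allows a layer to write its output over its input ("in-place" layers,
// e.g. a ReLU whose bottom and top are both "conv1"). The network being built
// here needs distinct tensor names, so each such chain is rewritten: a layer's
// top becomes "<name>_top" and the following layer's bottom is updated to
// match, leaving the last layer's top untouched for downstream references.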
void ResolveInPlaceLayers(std::vector<LayerParameterInfo>& layerInfo)
{
    std::map<std::string, std::vector<LayerParameterInfo*>> layersByTop;
    for (auto& info : layerInfo)
    {
        for (unsigned long i = 0; i < info.top_size(); ++i)
        {
            layersByTop[info.top(i)].push_back(&info);
        }
    }
    // For each set of layers with the same top, resolve them to a linear chain rather than in-place layers.
    // Note that for 'regular' layers, there will be a single layer in each group and so this will be a no-op.
    for (auto& layersWithSameTopIterator : layersByTop)
    {
        const std::string& top = layersWithSameTopIterator.first;
        const std::vector<LayerParameterInfo*> layersWithSameTop = layersWithSameTopIterator.second;

        // Chain the layers together in the order that they are listed in the prototxt (hopefully this is correct).
        // Note that the last layer will not have its top modified so that other layers will continue to reference it.
        for (unsigned int layerIdx = 0; layerIdx < layersWithSameTop.size() - 1; ++layerIdx)
        {
            LayerParameterInfo* layer1 = layersWithSameTop[layerIdx];
            LayerParameterInfo* layer2 = layersWithSameTop[layerIdx + 1];
            if (layer1->top_size() != 1)
            {
                throw armnn::ParseException("Node '" + layer1->name() + "' is an in-place layer but "
                                            "doesn't have exactly one top.");
            }
            std::string newTop = layer1->name() + "_top";
            layer1->set_top(0, newTop);
            if (layer2->bottom_size() != 1 || layer2->bottom(0) != top)
            {
                throw armnn::ParseException("Node '" + layer2->name() + "' is an in-place layer but "
                                            "doesn't have exactly one bottom, or it doesn't match its top.");
            }
            layer2->set_bottom(0, newTop);

        }
    }
}

} // anonymous namespace, can't be seen outside this source file

RecordByRecordCaffeParser::RecordByRecordCaffeParser() : CaffeParserBase()
{}

armnn::INetworkPtr RecordByRecordCaffeParser::CreateNetworkFromBinaryFile(
    const char* graphFile,
    const std::map<std::string, armnn::TensorShape>& inputShapes,
    const std::vector<std::string>& requestedOutputs)
{

    m_InputShapes = inputShapes;
    if (requestedOutputs.size() == 0)
    {
        throw armnn::ParseException("requestedOutputs must have at least one entry");
    }
    m_RequestedOutputs = requestedOutputs;

    std::ifstream ifs(graphFile, std::ifstream::in|std::ifstream::binary);
    if (ifs.fail())
    {
        throw armnn::FileNotFoundException("Failed to open graph file '" + std::string(graphFile) + "'");
    }

    std::vector<LayerParameterInfo> layerInfo;
    NetParameterInfo netParameterInfo;
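    // Scan the file one top-level NetParameter field at a time. The name, input
    // names, input shapes and input dimensions are read immediately; each
    // LayerParameter record (field 100) is not fully parsed here - only its
    // offset, length and topological details (name, type, tops, bottoms) are
    // recorded so the full record can be deserialised later, one layer at a time.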
    while(true)
    {
        ProtobufFieldInfo fieldInfo = readFieldInfo(ifs);
        if (fieldInfo.eof())
        {
            break;
        }
        switch(fieldInfo.field_type())
        {
            case 0:
            {
                ReadBase128(ifs);
                break;
            }
            case 2:
            {
                // The values of interest from the caffe.proto schema are:
                // optional string name = 1; // consider giving the network a name
                // DEPRECATED. See InputParameter. The input blobs to the network.
                // repeated string input = 3;
                // DEPRECATED. See InputParameter. The shape of the input blobs.
                // repeated BlobShape input_shape = 8;

                // 4D input dimensions -- deprecated. Use "input_shape" instead.
                // If specified, for each input blob there should be four
                // values specifying the num, channels, height and width of the input blob.
                // Thus, there should be a total of (4 * #input) numbers.
                // repeated int32 input_dim = 4;

                // The layers that make up the net. Each of their configurations, including
                // connectivity and behavior, is specified as a LayerParameter.
                // repeated LayerParameter layer = 100; // ID 100 so layers are printed last.

                // The first four will (if present) be read into the NetParameterInfo
                // the LayerParameters will be read into the LayerParameterInfo vector.

                int size = ReadBase128(ifs);
                std::streamoff posStartOfData = ifs.tellg();
                ifs.seekg(size, std::ios_base::cur);
                if(!ifs.good())
                {
                    throw armnn::ParseException("failed to seek ahead in binary caffe file");
                }
                std::streamoff endOfLayer = ifs.tellg();
                if (fieldInfo.field_id() == 1)
                {
                    VarLenDataInfo dataInfo = CreateVarLenDataInfo(posStartOfData, endOfLayer);
                    auto graphName = AllocateBuffer(ifs, dataInfo);
                    netParameterInfo.set_name(graphName, dataInfo.SizeOfData());
                }
                if (fieldInfo.field_id() == 3)
                {
                    VarLenDataInfo dataInfo = CreateVarLenDataInfo(posStartOfData, endOfLayer);
                    auto inputName = AllocateBuffer(ifs, dataInfo);
                    netParameterInfo.add_input(inputName, dataInfo.SizeOfData());
                }
                if (fieldInfo.field_id() == 8)
                {
                    VarLenDataInfo dataInfo = CreateVarLenDataInfo(posStartOfData, endOfLayer);
                    auto inputShape = AllocateBuffer(ifs, dataInfo);
                    caffe::BlobShape blobShape;
                    bool bRet = blobShape.ParseFromArray(inputShape.get(), static_cast<int>(dataInfo.SizeOfData()));
                    if (!bRet)
                    {
                        throw armnn::ParseException("Failed to parse input shape");
                    }
                    netParameterInfo.add_blob_shape(blobShape);
                }
                if (fieldInfo.field_id() == 4)
                {
                    int input_dim = ReadBase128(ifs);
                    netParameterInfo.add_input_dimension(input_dim);
                }
                if (fieldInfo.field_id() == 100)
                {
                    LayerParameterInfo info(CreateVarLenDataInfo(posStartOfData, endOfLayer));
                    ReadTopologicalInfoForLayerParameter(info, ifs);
                    layerInfo.push_back(info);
                }
                break;
            }
            default:
            {
                break;
            }
        }
    }
    std::vector<const LayerParameterInfo*> sortedNodes;
    ProcessLayers(netParameterInfo, layerInfo, m_RequestedOutputs, sortedNodes);
    armnn::INetworkPtr networkPtr = LoadLayers(ifs, sortedNodes, netParameterInfo);
    return networkPtr;

}

void RecordByRecordCaffeParser::ProcessLayers(
    const NetParameterInfo& netParameterInfo,
    std::vector<LayerParameterInfo>& layerInfo,
    const std::vector<std::string>& m_RequestedOutputs,
    std::vector<const LayerParameterInfo*>& sortedNodes)
{
    // if there is an implicit input layer add it to the layerInfo list
    if (netParameterInfo.input_size() > 0)
    {
        LayerParameterInfo implicitInputLayer(0, 0);
        implicitInputLayer.set_type(LayerParameterInfo::INPUT);
        implicitInputLayer.set_name(netParameterInfo.input(0));
        implicitInputLayer.add_top(netParameterInfo.input(0));
        layerInfo.push_back(implicitInputLayer);
    }
    ::ResolveInPlaceLayers(layerInfo);

    for (LayerParameterInfo& info : layerInfo)
    {
        for (unsigned long i = 0; i < info.top_size(); ++i)
        {
            m_CaffeLayersByTopName[info.top(i)] = &info;
        }
    }

    // Find the output layers the user requested
    std::vector<const LayerParameterInfo*> targetLayers;
    for (const std::string& requestedOutputName : m_RequestedOutputs)
    {
        auto nodeIt = m_CaffeLayersByTopName.find(requestedOutputName);
        if (nodeIt == m_CaffeLayersByTopName.end())
        {
            throw armnn::ParseException(
                "Couldn't find requested output layer '" + requestedOutputName + "' in graph");
        }
        targetLayers.push_back(nodeIt->second);
    }

    // Sort them into a linear ordering such that all inputs of a node are before the node itself
    if (!armnnUtils::GraphTopologicalSort<const LayerParameterInfo*>(
        targetLayers,
        [this](const LayerParameterInfo* node)
        {
            return GetInputs(*node);
        },
        sortedNodes))
    {
        throw armnn::ParseException("Cycle detected in graph");
    }
}


std::vector<const LayerParameterInfo*> RecordByRecordCaffeParser::GetInputs(
    const LayerParameterInfo& layerParam)
{
    std::vector<const LayerParameterInfo*> ret;
    ret.reserve(layerParam.bottom_size());
    for (unsigned long j = 0; j < layerParam.bottom_size(); ++j)
    {
        std::string inputName = layerParam.bottom(j);
        auto inputIt = m_CaffeLayersByTopName.find(inputName);
        if (inputIt == m_CaffeLayersByTopName.end())
        {
            throw armnn::ParseException(
                "Can't find Caffe layer with top called '" + inputName + "', which is listed as an input of '" +
                layerParam.name() + "'");
        }
        ret.push_back(inputIt->second);
    }

    return ret;
}

armnn::INetworkPtr RecordByRecordCaffeParser::LoadLayers(std::ifstream& ifs,
                                                         std::vector<const LayerParameterInfo *>& sortedNodes,
                                                         const NetParameterInfo& netParameterInfo)
{

    m_NetworkInputsBindingInfo.clear();
    m_NetworkOutputsBindingInfo.clear();

    m_Network = armnn::INetwork::Create();

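    // Deserialise the layers one record at a time, in topologically sorted order,
    // and dispatch each one to the parsing function registered for its type in
    // ms_CaffeLayerNameToParsingFunctions. Implicit input layers have no record in
    // the file and are rebuilt from the NetParameter level information instead.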
    for (auto info : sortedNodes)
    {
        caffe::LayerParameter layer;
        if (info->isImplicitInputLayer())
        {
            // create the matching Layer Parameter programmatically from the data in the
            // net parameter info which has been passed in...
            layer.set_type(LayerParameterInfo::INPUT);
            layer.set_name(netParameterInfo.input(0));
            layer.add_top(netParameterInfo.input(0));

            caffe::InputParameter* inputParam = layer.mutable_input_param();
            caffe::BlobShape* shape = inputParam->add_shape();

            long unsigned int dim_size = netParameterInfo.input_dimensions_size();
            for (long unsigned int i = 0; i < dim_size; ++i)
            {
                shape->add_dim(netParameterInfo.input_dimension(i));
            }
        }
        else
        {
            char *buffer = new char[info->SizeOfData()];
            ifs.clear();
            ifs.seekg(info->PositionOfData(), std::ios_base::beg);
            ifs.read(buffer, armnn::numeric_cast<std::streamsize>(info->SizeOfData()));
            bool bRet = layer.ParseFromArray(buffer, static_cast<int>(info->SizeOfData()));
            delete[] buffer;
            if (!bRet)
            {
                throw armnn::ParseException("Failed to parse layer [" + info->name() + "]");
            }
        }

        if (info->new_tops())
        {
            //update the tops
            layer.set_top(0, info->top(0));
        }
        if (info->new_bottoms())
        {
            //update the bottoms
            layer.set_bottom(0, info->bottom(0));
        }

        auto it = ms_CaffeLayerNameToParsingFunctions.find(layer.type());
        if (it == ms_CaffeLayerNameToParsingFunctions.end())
        {
            throw armnn::ParseException("Unsupported layer type '" + layer.type() + "'");
        }
        auto func = it->second;
        (this->*func)(layer);
    }
    ifs.close();

    // Add ArmNN output layers connected to each requested output
    for (const std::string& requestedOutput : m_RequestedOutputs)
    {
        armnn::IOutputSlot& outputSlot = GetArmnnOutputSlotForCaffeTop(requestedOutput);

        const armnn::LayerBindingId outputId = armnn::numeric_cast<armnn::LayerBindingId>(
            m_NetworkOutputsBindingInfo.size());
        armnn::IConnectableLayer* const outputLayer = m_Network->AddOutputLayer(outputId, requestedOutput.c_str());
        outputSlot.Connect(outputLayer->GetInputSlot(0));

        TrackOutputBinding(outputLayer, outputId, outputLayer->GetInputSlot(0).GetConnection()->GetTensorInfo());
    }

    Cleanup();

    return move(m_Network);
}