// NOTE(review): this file appears to be an HTML/doxygen text extraction of a
// C++ source file. The embedded numbers (14, 17, 33, ...) are the ORIGINAL
// source line numbers; gaps in that numbering mean lines (braces, returns,
// member declarations) were dropped by the extraction. Comments below are
// based only on what is visible here.
//
// VarLenDataInfo: records where a variable-length record lives in the input
// stream (stream offset) and how many bytes it occupies, so the record can be
// re-read later instead of being held in memory.
// NOTE(review): the enclosing "class VarLenDataInfo" declaration line itself
// (before original line 33) is missing from this extraction - confirm.
14 #include <boost/numeric/conversion/cast.hpp> 17 #include <google/protobuf/wire_format.h> 33 VarLenDataInfo(std::streamoff positionOfData,
size_t sizeOfData) :
34 m_PositionOfData(positionOfData), m_SizeOfData(sizeOfData) {}
// Copy constructor: copies offset and size via the public accessors.
36 VarLenDataInfo(
const VarLenDataInfo& x) :
37 m_PositionOfData(x.PositionOfData()), m_SizeOfData (x.SizeOfData()) {}
// Copy assignment. NOTE(review): the opening brace (original lines 40-44)
// was lost in extraction. Assigns both members, then returns *this.
39 VarLenDataInfo& operator=(
const VarLenDataInfo& x)
45 m_PositionOfData = x.PositionOfData(); m_SizeOfData = x.SizeOfData();
return *
this;
// Accessors for the stored stream offset and byte count.
48 std::streamoff PositionOfData()
const {
return m_PositionOfData;}
49 size_t SizeOfData()
const {
return m_SizeOfData;}
// Data members. NOTE(review): the matching m_SizeOfData declaration
// (original line ~53) is missing from this extraction.
52 std::streamoff m_PositionOfData;
// LayerParameterInfo: extends VarLenDataInfo (offset/size of the serialized
// caffe LayerParameter record) with the topology data extracted from it:
// layer name, layer type, top (output) names and bottom (input) names, plus
// flags recording whether tops/bottoms were renamed (by in-place resolution).
// NOTE(review): extraction dropped the class braces, access specifiers and
// some member declarations (m_name/m_type, original lines ~144-145) - confirm.
62 class LayerParameterInfo :
public VarLenDataInfo
// Sentinel layer-type string for input layers (defined out of line below
// in the original file, original line 224).
65 static const std::string INPUT;
// Construct from a plain VarLenDataInfo (offset/size only).
// NOTE(review): the trailing member initializers of this constructor
// (original lines 68-69) were dropped by the extraction.
66 LayerParameterInfo(
const VarLenDataInfo& varLenDataInfo) :
67 VarLenDataInfo(varLenDataInfo.PositionOfData(), varLenDataInfo.SizeOfData()),
// Construct from raw offset/size; no renames recorded yet.
70 LayerParameterInfo(std::streamoff positionOfData,
size_t sizeOfData) :
71 VarLenDataInfo(positionOfData, sizeOfData), m_newTops(
false), m_newBottoms(
false) {}
// Copy constructor. NOTE(review): initializers for m_name/m_type/m_tops
// (original lines 75-77) are missing from this extraction.
73 LayerParameterInfo(
const LayerParameterInfo& x) :
74 VarLenDataInfo(x.PositionOfData(), x.SizeOfData()),
78 m_bottoms(x.m_bottoms),
79 m_newTops(x.m_newTops),
80 m_newBottoms(x.m_newBottoms) {}
// Copy assignment: delegates the offset/size part to the base class, then
// copies the topology members. NOTE(review): the self-assignment guard,
// m_name/m_type/m_tops assignments and "return *this;" (original lines
// 83-90, 94-95) were dropped by the extraction.
82 LayerParameterInfo& operator=(
const LayerParameterInfo& x)
87 VarLenDataInfo::operator=(x);
91 m_bottoms = x.m_bottoms;
92 m_newTops = x.m_newTops;
93 m_newBottoms = x.m_newBottoms;
// --- name accessors (returns by value) ---
97 const std::string name()
const {
return m_name;}
// Set the name from a raw character buffer of the given length (buffer is
// not NUL-terminated; length comes from the protobuf field header).
98 void set_name(
const std::unique_ptr<
char[]>& theName,
size_t length)
100 m_name = std::string(theName.get(), length);
102 void set_name(
const std::string& theName) {m_name = theName;}
// --- type accessors, same pattern as name ---
104 const std::string type()
const {
return m_type;}
105 void set_type(
const std::unique_ptr<
char[]>& theType,
size_t length)
107 m_type = std::string(theType.get(), length);
109 void set_type(
const std::string& theType) {m_type = theType;}
// --- tops (layer outputs) ---
111 void add_top(
const std::unique_ptr<
char[]>& top,
size_t length)
113 std::string topName(top.get(), length);
114 m_tops.push_back(topName);
116 void add_top(
const std::string& topName)
118 m_tops.push_back(topName);
120 const std::string top(
unsigned long i)
const {
return m_tops[i];}
121 unsigned long top_size()
const {
return m_tops.size();}
// Renaming a top marks the layer so the loader knows to patch the parsed
// LayerParameter with the new name later.
122 void set_top(
unsigned long i,
const std::string& newName) {m_tops[i] = newName; m_newTops =
true;}
123 bool new_tops()
const {
return m_newTops;}
// --- bottoms (layer inputs), same pattern as tops ---
125 void add_bottom(
const std::unique_ptr<
char[]>& bottom,
size_t length)
127 std::string bottomName(bottom.get(), length);
128 m_bottoms.push_back(bottomName);
130 unsigned long bottom_size()
const {
return m_bottoms.size();}
131 const std::string bottom(
unsigned long i)
const {
return m_bottoms[i];}
132 void set_bottom(
unsigned long i,
const std::string& newName) {m_bottoms[i] = newName; m_newBottoms =
true;}
133 bool new_bottoms()
const {
return m_newBottoms;}
// True for the synthesized input layer: it has no backing record in the
// file (offset 0, size 0) and its type string equals INPUT ("Input").
137 bool isImplicitInputLayer()
const 139 if ((PositionOfData() == 0) && (SizeOfData() == 0) && INPUT.compare(type()) == 0)
140 {
return true;}
else {
return false;}
// NOTE(review): m_name and m_type declarations (original lines ~144-145)
// are missing from this extraction.
146 std::vector<std::string> m_tops;
147 std::vector<std::string> m_bottoms;
// ProtobufFieldInfo: lightweight holder for one decoded protobuf field
// header (tag): the wire type and the field id. The default constructor
// builds an "eof" marker (m_eof == true) used to signal end-of-stream to
// callers of readFieldInfo().
// NOTE(review): the class braces, access specifiers and the m_eof /
// m_field_type / m_field_id member declarations (original lines ~169-177)
// were dropped by the extraction.
159 class ProtobufFieldInfo
162 ProtobufFieldInfo(
int field_type,
int field_id) :
163 m_eof(
false), m_field_type(field_type), m_field_id(field_id) {}
// Default-constructed instance means "no field read: end of file".
164 ProtobufFieldInfo() : m_eof(
true), m_field_type(0), m_field_id(0) {}
166 bool eof() {
return m_eof;}
167 int field_type() {
return m_field_type;}
168 int field_id() {
return m_field_id;}
// NetParameterInfo: accumulates the top-level NetParameter data read from
// the file: network name, input blob names, legacy scalar input dimensions,
// and input BlobShapes (parsed from field 8 of the NetParameter record).
// NOTE(review): class braces, access specifiers and the m_name member
// declaration were dropped by the extraction.
182 class NetParameterInfo
185 const std::string name()
const {
return m_name;}
// Set the network name from a raw, non-NUL-terminated buffer + length.
186 void set_name(
const std::unique_ptr<
char[]>& theName,
size_t length)
188 m_name = std::string(theName.get(), length);
// --- input blob names ---
191 void add_input(
const std::unique_ptr<
char[]>& input,
size_t length)
193 std::string inputName(input.get(), length);
194 m_inputs.push_back(inputName);
196 const std::string input(
unsigned long i)
const {
return m_inputs[i];}
197 unsigned long input_size()
const {
return m_inputs.size();}
// --- legacy flat input dimensions (NetParameter field 4, "input_dim") ---
199 void add_input_dimension(
int input_dimension) {
200 m_input_dimensions.push_back(input_dimension);
202 int input_dimension(
unsigned long i)
const {
return m_input_dimensions[i];}
203 unsigned long input_dimensions_size()
const {
return m_input_dimensions.size();}
// --- input shapes (NetParameter field 8, "input_shape") ---
// NOTE(review): shape is taken by value (a copy); a const& parameter would
// avoid the copy - confirm against the original before changing.
205 void add_blob_shape(caffe::BlobShape shape) {
206 m_blob_shapes.push_back(shape);
208 const caffe::BlobShape blob_shape(
unsigned long i)
const {
return m_blob_shapes[i];}
209 unsigned long blob_shapes_size()
const {
return m_blob_shapes.size();}
213 std::vector<std::string> m_inputs;
214 std::vector<int> m_input_dimensions;
215 std::vector<caffe::BlobShape> m_blob_shapes;
// Out-of-line definition of the sentinel layer-type string used by
// LayerParameterInfo::isImplicitInputLayer() to recognise the synthesized
// input layer ("Input" is the caffe layer type for input layers).
224 const std::string LayerParameterInfo::INPUT =
"Input";
// Reads one protobuf field header (tag) from the stream. In the protobuf
// wire format the low 3 bits of the tag byte are the wire type and the
// remaining bits are the field id; here, after the 3-bit shift, bit 4 of
// field_id set (field_id & 16) means the tag varint continues into a second
// byte, whose low 7 bits contribute bits 4..10 of the field id.
// NOTE(review): extraction dropped the stream-eof checks around the bare
// "ProtobufFieldInfo eof;" objects (original lines 232-235 / 242-245,
// presumably "if (!ifs.good()) return eof;") and the final
// "return fieldInfo;" - confirm against the original.
229 ProtobufFieldInfo readFieldInfo(std::ifstream& ifs)
231 unsigned char first_byte =
static_cast<unsigned char>(ifs.get());
234 ProtobufFieldInfo eof;
237 int field_type = first_byte&7;
238 int field_id = first_byte>>3;
239 if ((field_id & 16) == 16)
241 unsigned char second_byte =
static_cast<unsigned char>(ifs.get());
244 ProtobufFieldInfo eof;
// Combine: low 4 bits from the first byte, next 7 bits from the second.
247 field_id = (field_id-16) + ((second_byte&127)<<4);
249 ProtobufFieldInfo fieldInfo(field_type, field_id);
// A base-128 varint encoding of a 32-bit value needs at most ceil(32/7) = 5
// bytes; ReadBase128 uses this to bail out on malformed input.
253 const static int MAX_NUM_BYTES = 5;
// Decodes a base-128 varint (protobuf wire format) from the stream:
// little-endian groups of 7 bits, most-significant bit of each byte set
// while more bytes follow. Gives up (message at original line 267,
// presumably thrown) after MAX_NUM_BYTES bytes.
// NOTE(review): extraction dropped the declarations of "result" and
// "bytesRead", the read loop construct, the throw expression wrapping the
// message below, and the final "return result;" - confirm.
255 int ReadBase128(std::ifstream& ifs)
258 unsigned int shift_by = 0;
262 unsigned char a_byte =
static_cast<unsigned char>(ifs.get());
264 if (bytesRead > MAX_NUM_BYTES)
267 "ReadBase128 exceeded the maximum number of bytes expected for an integer representation");
// Accumulate the low 7 bits of this byte at the current shift position.
269 result += (a_byte & 127) << shift_by;
// MSB clear -> this was the last byte of the varint.
271 if ((a_byte & 128) != 128)
// Allocates a buffer of dataInfo.SizeOfData() bytes, seeks the stream to
// dataInfo.PositionOfData() and fills the buffer from it. Ownership is
// returned to the caller via unique_ptr.
// NOTE(review): the final "return ptr;" (original line ~286) was dropped by
// the extraction.
280 std::unique_ptr<char[]> AllocateBuffer(std::ifstream& ifs, VarLenDataInfo& dataInfo)
282 std::unique_ptr<char[]> ptr(
new char[dataInfo.SizeOfData()]);
284 ifs.seekg(dataInfo.PositionOfData(), std::ios_base::beg);
285 ifs.read(ptr.get(), boost::numeric_cast<std::streamsize>(dataInfo.SizeOfData()));
// Builds a VarLenDataInfo describing the byte range [bufferStart, endOfLayer)
// of one record, validating the computed size along the way.
// NOTE(review): extraction dropped the "if (sizeOfLayer < 0)" guard before
// the first error message, the throw statements wrapping both messages, and
// the final "return info;" - confirm against the original.
289 VarLenDataInfo CreateVarLenDataInfo(std::streamoff bufferStart, std::streamoff endOfLayer) {
290 std::streamoff sizeOfLayer = endOfLayer - bufferStart;
293 std::stringstream ss;
294 ss <<
"error when determining buffer size, negative value [" << sizeOfLayer <<
"]";
// NOTE(review): this guard looks dead - a size_t can never exceed SIZE_MAX,
// and boost::numeric_cast would already throw on an out-of-range value
// before the comparison runs. Flagging rather than fixing, since the
// surrounding control flow is incomplete here.
302 if (boost::numeric_cast<size_t>(sizeOfLayer) > SIZE_MAX) {
303 std::stringstream ss;
304 ss <<
"layer is greater than " << SIZE_MAX <<
" in size cannot process. layer size = [" << sizeOfLayer <<
"]";
// Constructs a LayerParameterInfo, returned as the VarLenDataInfo base
// (the derived topology fields are defaulted and sliced away).
307 LayerParameterInfo
info(bufferStart, boost::numeric_cast<size_t>(sizeOfLayer));
// Scans the serialized LayerParameter record in [PositionOfData(),
// PositionOfData()+SizeOfData()) field by field, extracting ONLY the
// topology information (name, type, bottoms, tops) into layerInfo without
// doing a full protobuf parse of the record.
// NOTE(review): extraction dropped the scan loop construct, the switch-case
// labels, closing braces and the eof handling - confirm against the original.
311 void ReadTopologicalInfoForLayerParameter(LayerParameterInfo& layerInfo, std::ifstream& ifs)
315 ifs.seekg(layerInfo.PositionOfData(), std::ios_base::beg);
// End of this record, used as the scan loop bound.
316 std::streamoff endOfLayer = layerInfo.PositionOfData() +
317 boost::numeric_cast<std::streamoff>(layerInfo.SizeOfData());
321 std::streamoff currentPosition = ifs.tellg();
322 if (currentPosition >= endOfLayer) {
// Read the next field header and dispatch on its wire type.
326 ProtobufFieldInfo fieldInfo = readFieldInfo(ifs);
334 switch (fieldInfo.field_type())
// Length-delimited field (wire type 2): varint length, then payload.
343 int size = ReadBase128(ifs);
344 std::streamoff posStartOfData = ifs.tellg();
345 VarLenDataInfo dataInfo(posStartOfData, boost::numeric_cast<size_t>(size));
// LayerParameter field ids: 1 = name, 2 = type, 3 = bottom, 4 = top.
350 if (fieldInfo.field_id() == 1)
353 auto layerName = AllocateBuffer(ifs, dataInfo);
354 layerInfo.set_name(layerName, dataInfo.SizeOfData());
356 else if (fieldInfo.field_id() == 2)
359 auto layerType = AllocateBuffer(ifs, dataInfo);
360 layerInfo.set_type(layerType, dataInfo.SizeOfData());
362 else if (fieldInfo.field_id() == 3)
365 auto bottom = AllocateBuffer(ifs, dataInfo);
366 layerInfo.add_bottom(bottom, dataInfo.SizeOfData());
368 else if (fieldInfo.field_id() == 4)
371 auto top = AllocateBuffer(ifs, dataInfo);
372 layerInfo.add_top(top, dataInfo.SizeOfData());
// Any other length-delimited field: skip its payload.
376 ifs.seekg(size, std::ios_base::cur);
// Presumably the fixed64 (skip 8) and fixed32 (skip 4) wire-type cases;
// the case labels were lost in extraction - confirm.
389 ifs.seekg(8, std::ios_base::cur);
401 ifs.seekg(4, std::ios_base::cur);
// Caffe "in-place" layers reuse the same name for their bottom and top.
// This rewrites each chain of layers sharing a top name so every top is
// produced by exactly one layer: layer k gets a fresh top "<name>_top" and
// layer k+1's bottom is redirected to it.
// NOTE(review): extraction dropped loop/if braces and the throw statements
// wrapping the two error messages below - confirm against the original.
418 void ResolveInPlaceLayers(std::vector<LayerParameterInfo>& layerInfo)
// Group layers by each top name they produce.
420 std::map<std::string, std::vector<LayerParameterInfo*>> layersByTop;
421 for (
auto& info : layerInfo)
423 for (
unsigned long i = 0; i <
info.top_size(); ++i)
425 layersByTop[
info.top(i)].push_back(&info);
// For each shared top name, rewire consecutive layers in the chain.
430 for (
auto& layersWithSameTopIterator : layersByTop)
432 const std::string& top = layersWithSameTopIterator.first;
433 const std::vector<LayerParameterInfo*> layersWithSameTop = layersWithSameTopIterator.second;
437 for (
unsigned int layerIdx = 0; layerIdx < layersWithSameTop.size() - 1; ++layerIdx)
439 LayerParameterInfo* layer1 = layersWithSameTop[layerIdx];
440 LayerParameterInfo* layer2 = layersWithSameTop[layerIdx + 1];
// In-place resolution only supports single-top / single-bottom chains;
// the messages below are presumably thrown (throws lost in extraction).
441 if (layer1->top_size() != 1)
444 "doesn't have exactly one top.");
446 std::string newTop = layer1->name() +
"_top";
447 layer1->set_top(0, newTop);
448 if (layer2->bottom_size() != 1 || layer2->bottom(0) != top)
451 " doesn't have exactly one bottom, or it doesn't match its top.");
453 layer2->set_bottom(0, newTop);
// Body fragment of RecordByRecordCaffeParser::CreateNetworkFromBinaryFile
// (per the declaration visible at the end of this extraction); the function
// opening line (original ~464) and much of its control flow (read loop,
// switch-case labels, error handling, the final return) were dropped.
// Scans the top-level NetParameter message record by record, collecting
// metadata into netParameterInfo and per-layer offsets into layerInfo,
// instead of parsing the whole file with protobuf at once.
465 const char* graphFile,
466 const std::map<std::string, armnn::TensorShape>& inputShapes,
467 const std::vector<std::string>& requestedOutputs)
// At least one requested output is required (the error branch following
// this check was dropped by the extraction).
471 if (requestedOutputs.size() == 0)
475 m_RequestedOutputs = requestedOutputs;
477 std::ifstream ifs(graphFile, std::ifstream::in|std::ifstream::binary);
483 std::vector<LayerParameterInfo> layerInfo;
484 NetParameterInfo netParameterInfo;
// Per-record loop: read the field header, dispatch on wire type.
487 ProtobufFieldInfo fieldInfo = readFieldInfo(ifs);
492 switch(fieldInfo.field_type())
// Length-delimited field: note where the payload starts, then skip over
// it; individual handlers below seek back into it as needed.
521 int size = ReadBase128(ifs);
522 std::streamoff posStartOfData = ifs.tellg();
523 ifs.seekg(size, std::ios_base::cur);
528 std::streamoff endOfLayer = ifs.tellg();
// NetParameter field ids handled here: 1 = name, 3 = input, 8 =
// input_shape, 4 = input_dim (varint), 100 = layer.
529 if (fieldInfo.field_id() == 1)
531 VarLenDataInfo dataInfo = CreateVarLenDataInfo(posStartOfData, endOfLayer);
532 auto graphName = AllocateBuffer(ifs, dataInfo);
533 netParameterInfo.set_name(graphName, dataInfo.SizeOfData());
535 if (fieldInfo.field_id() == 3)
537 VarLenDataInfo dataInfo = CreateVarLenDataInfo(posStartOfData, endOfLayer);
538 auto inputName = AllocateBuffer(ifs, dataInfo);
539 netParameterInfo.add_input(inputName, dataInfo.SizeOfData());
541 if (fieldInfo.field_id() == 8)
543 VarLenDataInfo dataInfo = CreateVarLenDataInfo(posStartOfData, endOfLayer);
544 auto inputShape = AllocateBuffer(ifs, dataInfo);
// input_shape is small, so it is fully parsed via protobuf here; the
// handling of a false bRet (original lines ~547-550) was dropped.
545 caffe::BlobShape blobShape;
546 bool bRet = blobShape.ParseFromArray(inputShape.get(),
static_cast<int>(dataInfo.SizeOfData()));
551 netParameterInfo.add_blob_shape(blobShape);
553 if (fieldInfo.field_id() == 4)
555 int input_dim = ReadBase128(ifs);
556 netParameterInfo.add_input_dimension(input_dim);
// Layer records are NOT parsed here: only their offset/size and topology
// (via ReadTopologicalInfoForLayerParameter) are recorded.
558 if (fieldInfo.field_id() == 100)
560 LayerParameterInfo
info(CreateVarLenDataInfo(posStartOfData, endOfLayer));
561 ReadTopologicalInfoForLayerParameter(info, ifs);
562 layerInfo.push_back(info);
// After the scan: topologically order the layers feeding the requested
// outputs. (The rest of the function body was dropped by the extraction.)
572 std::vector<const LayerParameterInfo*> sortedNodes;
573 ProcessLayers(netParameterInfo, layerInfo, m_RequestedOutputs, sortedNodes);
// Prepares the layer set for loading: synthesizes the implicit input layer
// (when NetParameter declares inputs), indexes every layer by the top names
// it produces, resolves the requested outputs to layers, and topologically
// sorts the graph (via armnnUtils::GraphTopologicalSort) so producers come
// before consumers.
// NOTE(review): extraction dropped braces, the ResolveInPlaceLayers call
// presumably made between indexing steps, the sort's remaining arguments
// and its failure handling - confirm against the original.
579 void RecordByRecordCaffeParser::ProcessLayers(
580 const NetParameterInfo& netParameterInfo,
581 std::vector<LayerParameterInfo>& layerInfo,
582 const std::vector<std::string>& m_RequestedOutputs,
583 std::vector<const LayerParameterInfo*>& sortedNodes)
// A NetParameter-level input becomes an implicit "Input" layer with no
// backing record (offset 0, size 0 - see isImplicitInputLayer()).
586 if (netParameterInfo.input_size() > 0)
588 LayerParameterInfo implicitInputLayer(0, 0);
589 implicitInputLayer.set_type(LayerParameterInfo::INPUT);
590 implicitInputLayer.set_name(netParameterInfo.input(0));
591 implicitInputLayer.add_top(netParameterInfo.input(0));
592 layerInfo.push_back(implicitInputLayer);
// Index layers by top name for output lookup and dependency resolution.
// Pointers into layerInfo stay valid because layerInfo is not resized
// after this point (within the visible code).
596 for (LayerParameterInfo&
info : layerInfo)
598 for (
unsigned long i = 0; i <
info.top_size(); ++i)
600 m_CaffeLayersByTopName[
info.top(i)] = &
info;
// Resolve each requested output name to the layer producing it; the
// exception around the message below was presumably thrown (lost in
// extraction).
605 std::vector<const LayerParameterInfo*> targetLayers;
606 for (
const std::string& requestedOutputName : m_RequestedOutputs)
608 auto nodeIt = m_CaffeLayersByTopName.find(requestedOutputName);
609 if (nodeIt == m_CaffeLayersByTopName.end())
612 "Couldn't find requested output layer '" + requestedOutputName +
"' in graph");
614 targetLayers.push_back(nodeIt->second);
// Topological sort from the target layers backwards through GetInputs;
// the sort's remaining arguments and error branch were dropped.
618 if (!armnnUtils::GraphTopologicalSort<const LayerParameterInfo*>(
620 [
this](
const LayerParameterInfo* node)
622 return GetInputs(*node);
// Returns the layers that produce each bottom (input) of layerParam, by
// looking each bottom name up in m_CaffeLayersByTopName. Used as the
// dependency function for the topological sort in ProcessLayers.
// NOTE(review): extraction dropped the braces, the throw wrapping the error
// message, and the final "return ret;" - confirm against the original.
631 std::vector<const LayerParameterInfo*> RecordByRecordCaffeParser::GetInputs(
632 const LayerParameterInfo& layerParam)
634 std::vector<const LayerParameterInfo*> ret;
635 ret.reserve(layerParam.bottom_size());
636 for (
unsigned long j = 0; j < layerParam.bottom_size(); ++j)
638 std::string inputName = layerParam.bottom(j);
639 auto inputIt = m_CaffeLayersByTopName.find(inputName);
// A bottom with no producing layer is a malformed graph; the message
// below is presumably thrown.
640 if (inputIt == m_CaffeLayersByTopName.end())
643 "Can't find Caffe layer with top called '" + inputName +
"', which is listed as an input of '" +
644 layerParam.name() +
"'");
646 ret.push_back(inputIt->second);
// Fragment of the loader that walks the topologically sorted layers and
// builds the ArmNN network. The function signature line (original ~651-652,
// presumably "...::LoadLayers(std::ifstream& ifs, ..." - confirm) and much
// of the control flow were dropped by the extraction.
653 std::vector<const LayerParameterInfo *>& sortedNodes,
654 const NetParameterInfo& netParameterInfo)
662 for (
auto info : sortedNodes)
664 caffe::LayerParameter layer;
// The implicit input layer has no record in the file: synthesize a
// LayerParameter for it, with its shape taken from the NetParameter's
// flat input_dim list.
665 if (
info->isImplicitInputLayer())
669 layer.set_type(LayerParameterInfo::INPUT);
670 layer.set_name(netParameterInfo.input(0));
671 layer.add_top(netParameterInfo.input(0));
673 caffe::InputParameter* inputParam = layer.mutable_input_param();
674 caffe::BlobShape* shape = inputParam->add_shape();
676 long unsigned int dim_size = netParameterInfo.input_dimensions_size();
677 for (
long unsigned int i = 0; i < dim_size; ++i)
679 shape->add_dim(netParameterInfo.input_dimension(i));
// Every other layer is re-read from the file at its recorded offset and
// parsed with protobuf, one record at a time.
// NOTE(review): raw "new char[]" here - if no matching delete[] follows
// in the dropped lines, or if ParseFromArray throws, this leaks; the
// AllocateBuffer helper above uses unique_ptr for the same job - confirm.
684 char *buffer =
new char[
info->SizeOfData()];
686 ifs.seekg(
info->PositionOfData(), std::ios_base::beg);
687 ifs.read(buffer, boost::numeric_cast<std::streamsize>(
info->SizeOfData()));
688 bool bRet = layer.ParseFromArray(buffer, static_cast<int>(
info->SizeOfData()));
// Apply any renames recorded by ResolveInPlaceLayers to the freshly
// parsed LayerParameter (only index 0 is patched - chains are enforced
// to be single-top/single-bottom there).
696 if (
info->new_tops())
699 layer.set_top(0,
info->top(0));
701 if (
info->new_bottoms())
704 layer.set_bottom(0,
info->bottom(0));
// Dispatch to the per-layer-type parsing member function (presumably
// looked up in ms_CaffeLayerNameToParsingFunctions - the lookup lines
// were dropped by the extraction).
712 auto func = it->second;
713 (this->*func)(layer);
// Wire each requested output to an ArmNN output layer and record its
// binding info (id + tensor info) for the caller.
718 for (
const std::string& requestedOutput : m_RequestedOutputs)
725 outputSlot.
Connect(outputLayer->GetInputSlot(0));
727 TrackOutputBinding(outputLayer, outputId, outputLayer->GetInputSlot(0).GetConnection()->GetTensorInfo());
armnn::IOutputSlot & GetArmnnOutputSlotForCaffeTop(const std::string &caffeTopName) const
void TrackOutputBinding(armnn::IConnectableLayer *layer, armnn::LayerBindingId id, const armnn::TensorInfo &tensorInfo)
void ResolveInPlaceLayers(caffe::NetParameter &netParameter)
DataLayout::NCHW DataLayout::NCHW DataLayout::NHWC DataLayout::NHWC true
static INetworkPtr Create()
std::unordered_map< std::string, BindingPointInfo > m_NetworkOutputsBindingInfo
maps output layer names to their corresponding ids and tensor infos
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
armnn::INetworkPtr m_Network
An output connection slot for a layer. The output slot may be connected to 1 or more input slots of subsequent layers in the graph.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
std::unordered_map< std::string, BindingPointInfo > m_NetworkInputsBindingInfo
maps input layer names to their corresponding ids and tensor infos
RecordByRecordCaffeParser()
virtual int Connect(IInputSlot &destination)=0
virtual armnn::INetworkPtr CreateNetworkFromBinaryFile(const char *graphFile, const std::map< std::string, armnn::TensorShape > &inputShapes, const std::vector< std::string > &requestedOutputs) override
Create the network from a protobuf binary file on disk.
int LayerBindingId
Type of identifiers for bindable layers (inputs, outputs).
std::map< std::string, armnn::TensorShape > m_InputShapes
static const std::map< std::string, OperationParsingFunction > ms_CaffeLayerNameToParsingFunctions
Maps Caffe layer names to parsing member functions.