From 5b5c222f6b0c40a8e0f9ef9dedccd6f0f18c4c2c Mon Sep 17 00:00:00 2001 From: Colm Donelan Date: Wed, 9 Sep 2020 12:48:16 +0100 Subject: IVGCVSW-5297 Remove boost::format from rest of ArmNN. * Replacing calls to boost:format with fmt:format. * TensorUtils.cpp added outputShape.reserve call. Signed-off-by: Colm Donelan Change-Id: I4b2ed0f72039df824a2adca9309b8a9bbb158c5b --- src/armnn/Graph.cpp | 13 +- src/armnn/Layer.cpp | 37 ++-- src/armnn/LoadedNetwork.cpp | 26 ++- src/armnnConverter/ArmnnConverter.cpp | 20 +- src/armnnDeserializer/Deserializer.cpp | 203 +++++++++------------ .../test/ParserFlatbuffersSerializeFixture.hpp | 16 +- src/armnnUtils/ParserHelper.cpp | 16 +- src/armnnUtils/ParserPrototxtFixture.hpp | 57 +++--- src/armnnUtils/TensorIOUtils.hpp | 25 ++- src/armnnUtils/TensorUtils.cpp | 12 +- src/armnnUtils/VerificationHelpers.cpp | 36 ++-- 11 files changed, 204 insertions(+), 257 deletions(-) (limited to 'src') diff --git a/src/armnn/Graph.cpp b/src/armnn/Graph.cpp index a497a45da9..30546e1f0a 100644 --- a/src/armnn/Graph.cpp +++ b/src/armnn/Graph.cpp @@ -16,7 +16,7 @@ #include #include -#include +#include #include #include @@ -312,12 +312,11 @@ void Graph::AddCompatibilityLayers(std::map %3% (%4%) ]") - % srcLayer->GetName() - % srcOutputIndex - % dstLayer.GetName() - % dstInputSlot->GetSlotIndex()); - + const std::string compLayerName = fmt::format("[ {} ({}) -> {} ({}) ]", + srcLayer->GetName(), + srcOutputIndex, + dstLayer.GetName(), + dstInputSlot->GetSlotIndex()); Layer* compLayer = nullptr; if (strategy == EdgeStrategy::CopyToTarget) { diff --git a/src/armnn/Layer.cpp b/src/armnn/Layer.cpp index c8d5a1690d..d06b0459f6 100644 --- a/src/armnn/Layer.cpp +++ b/src/armnn/Layer.cpp @@ -10,7 +10,7 @@ #include #include -#include +#include #include @@ -159,8 +159,7 @@ void OutputSlot::ValidateConnectionIndex(unsigned int index) const { if (armnn::numeric_cast(index) >= m_Connections.size()) { - throw InvalidArgumentException( - boost::str(boost::format("GetConnection: Invalid index %1% provided") % index)); + throw InvalidArgumentException((fmt::format("GetConnection: Invalid index {} provided", index))); } } @@ -350,14 +349,12 @@ void Layer::VerifyLayerConnections(unsigned int expectedConnections, const Check if (GetInputSlot(i).GetConnection() == nullptr) { throw LayerValidationException( - boost::str( - boost::format( - "Input connection #%1% must be connected " - "for %2% layer %3% %4%") - % i - % GetLayerTypeAsCString(this->GetType()) - % GetNameStr() - % location.AsString())); + fmt::format("Input connection #{0} must be connected " + "for {1} layer {2} {3}", + i, + GetLayerTypeAsCString(this->GetType()), + GetNameStr(), + location.AsString())); } } } @@ -375,16 +372,14 @@ std::vector Layer::InferOutputShapes(const std::vector if (GetNumInputSlots() != GetNumOutputSlots()) { throw UnimplementedException( - boost::str( - boost::format( - "Default implementation for InferOutputShapes can only be used for " - "layers with the same number of input and output slots. This doesn't " - "hold for %1% layer %2% (#inputs=%3% #outputs=%4%) %5%") - % GetLayerTypeAsCString(this->GetType()) - % GetNameStr() - % GetNumInputSlots() - % GetNumOutputSlots() - % CHECK_LOCATION().AsString())); + fmt::format("Default implementation for InferOutputShapes can only be used for " + "layers with the same number of input and output slots. 
This doesn't " + "hold for {0} layer {1} (#inputs={2} #outputs={3}) {4}", + GetLayerTypeAsCString(this->GetType()), + GetNameStr(), + GetNumInputSlots(), + GetNumOutputSlots(), + CHECK_LOCATION().AsString())); } return inputShapes; } diff --git a/src/armnn/LoadedNetwork.cpp b/src/armnn/LoadedNetwork.cpp index 33625744c5..00ac90b121 100644 --- a/src/armnn/LoadedNetwork.cpp +++ b/src/armnn/LoadedNetwork.cpp @@ -8,7 +8,6 @@ #include "Graph.hpp" #include "Network.hpp" #include -#include "Runtime.hpp" #include "Profiling.hpp" #include "HeapProfiling.hpp" @@ -23,7 +22,7 @@ #include -#include +#include namespace armnn { @@ -236,9 +235,9 @@ LoadedNetwork::LoadedNetwork(std::unique_ptr net, { const char* const layerName = layer->GetNameStr().length() != 0 ? layer->GetName() : ""; - throw InvalidArgumentException(boost::str( - boost::format("No workload created for layer (name: '%1%' type: '%2%') (compute '%3%')") - % layerName % static_cast(layer->GetType()) % layer->GetBackendId().Get() + throw InvalidArgumentException( + fmt::format("No workload created for layer (name: '{0}' type: '{1}') (compute '{2}')", + layerName, static_cast(layer->GetType()), layer->GetBackendId().Get() )); } @@ -325,7 +324,7 @@ TensorInfo LoadedNetwork::GetInputTensorInfo(LayerBindingId layerId) const } } - throw InvalidArgumentException(boost::str(boost::format("No input layer is associated with id %1%") % layerId)); + throw InvalidArgumentException(fmt::format("No input layer is associated with id {}", layerId)); } TensorInfo LoadedNetwork::GetOutputTensorInfo(LayerBindingId layerId) const @@ -340,7 +339,7 @@ TensorInfo LoadedNetwork::GetOutputTensorInfo(LayerBindingId layerId) const } } - throw InvalidArgumentException(boost::str(boost::format("No output layer is associated with id %1%") % layerId)); + throw InvalidArgumentException(fmt::format("No output layer is associated with id {}", layerId)); } const IWorkloadFactory& LoadedNetwork::GetWorkloadFactory(const Layer& layer) const @@ -350,12 +349,10 @@ const IWorkloadFactory& LoadedNetwork::GetWorkloadFactory(const Layer& layer) co auto it = m_WorkloadFactories.find(layer.GetBackendId()); if (it == m_WorkloadFactories.end()) { - throw RuntimeException( - boost::str( - boost::format("No workload factory for %1% to be used for layer: %2%") - % layer.GetBackendId().Get() - % layer.GetNameStr()), - CHECK_LOCATION()); + throw RuntimeException(fmt::format("No workload factory for {0} to be used for layer: {1}", + layer.GetBackendId().Get(), + layer.GetNameStr()), + CHECK_LOCATION()); } workloadFactory = it->second.first.get(); @@ -411,8 +408,7 @@ static const TensorPin& GetTensorPin(LayerBindingId id, } else { - throw InvalidArgumentException(boost::str( - boost::format("No tensor supplied for %1% %2%") % bindingPointDesc % id)); + throw InvalidArgumentException(fmt::format("No tensor supplied for {0} {1}", bindingPointDesc, id)); } } diff --git a/src/armnnConverter/ArmnnConverter.cpp b/src/armnnConverter/ArmnnConverter.cpp index 21b89ea6f8..c8b9ba9f28 100644 --- a/src/armnnConverter/ArmnnConverter.cpp +++ b/src/armnnConverter/ArmnnConverter.cpp @@ -24,7 +24,7 @@ #include #include "armnn/utility/StringUtils.hpp" -#include +#include #include #include @@ -265,9 +265,9 @@ private: const size_t numInputBindings = m_InputNames.size(); if (numInputShapes < numInputBindings) { - throw armnn::Exception(boost::str(boost::format( - "Not every input has its tensor shape specified: expected=%1%, got=%2%") - % numInputBindings % numInputShapes)); + throw armnn::Exception(fmt::format( + 
"Not every input has its tensor shape specified: expected={0}, got={1}", + numInputBindings, numInputShapes)); } for (size_t i = 0; i < numInputShapes; i++) @@ -298,9 +298,9 @@ private: const size_t numInputBindings = m_InputNames.size(); if (numInputShapes < numInputBindings) { - throw armnn::Exception(boost::str(boost::format( - "Not every input has its tensor shape specified: expected=%1%, got=%2%") - % numInputBindings % numInputShapes)); + throw armnn::Exception(fmt::format( + "Not every input has its tensor shape specified: expected={0}, got={1}", + numInputBindings, numInputShapes)); } } @@ -325,9 +325,9 @@ private: const size_t numInputBindings = m_InputNames.size(); if (numInputShapes < numInputBindings) { - throw armnn::Exception(boost::str(boost::format( - "Not every input has its tensor shape specified: expected=%1%, got=%2%") - % numInputBindings % numInputShapes)); + throw armnn::Exception(fmt::format( + "Not every input has its tensor shape specified: expected={0}, got={1}", + numInputBindings, numInputShapes)); } } diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp index 4c2f2f1397..1b62484d3e 100644 --- a/src/armnnDeserializer/Deserializer.cpp +++ b/src/armnnDeserializer/Deserializer.cpp @@ -20,7 +20,7 @@ #include #include -#include +#include #include #include @@ -45,24 +45,19 @@ const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits::max(); { if (graph->layers() == nullptr) { - throw ParseException( - boost::str( - boost::format("%1% was called with invalid (null) graph. " - "Possible reason is that the graph is not yet loaded and Unpack(ed). " - "layers:%2% at %3%") % - location.m_Function % - layersIndex % - location.FileLine())); + throw ParseException(fmt::format("{0} was called with invalid (null) graph. " + "Possible reason is that the graph is not yet loaded and Unpack(ed). " + "layers:{1} at {2}", + location.m_Function, + layersIndex, + location.FileLine())); } else if (layersIndex >= graph->layers()->size()) { - throw ParseException( - boost::str( - boost::format("%1% was called with an invalid layers index. " - "layers:%2% at %3%") % - location.m_Function % - layersIndex % - location.FileLine())); + throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}", + location.m_Function, + layersIndex, + location.FileLine())); } } @@ -73,36 +68,30 @@ void CheckLayers(const Deserializer::GraphPtr& graph, { if (graph->layers() == nullptr) { - throw ParseException( - boost::str( - boost::format("%1% was called with invalid (null) graph. " - "Possible reason is that the graph is not yet loaded and Unpack(ed). " - "layers:%2% at %3%") % - location.m_Function % - layersIndex % - location.FileLine())); + throw ParseException(fmt::format("{0} was called with invalid (null) graph. " + "Possible reason is that the graph is not yet loaded and Unpack(ed). " + "layers:{1} at {2}", + location.m_Function, + layersIndex, + location.FileLine())); } else if (layersIndex >= graph->layers()->size()) { - throw ParseException( - boost::str( - boost::format("%1% was called with an invalid layers index. " - "layers:%2% at %3%") % - location.m_Function % - layersIndex % - location.FileLine())); + throw ParseException(fmt::format("{0} was called with an invalid layers index. 
" + "layers:{1} at {2}", + location.m_Function, + layersIndex, + location.FileLine())); } else if (layerIndex >= graph->layers()[layersIndex].size() && layerIndex != VIRTUAL_LAYER_ID) { - throw ParseException( - boost::str( - boost::format("%1% was called with an invalid layer index. " - "layers:%2% layer:%3% at %4%") % - location.m_Function % - layersIndex % - layerIndex % - location.FileLine())); + throw ParseException(fmt::format("{0} was called with an invalid layer index. " + "layers:{1} layer:{2} at {3}", + location.m_Function, + layersIndex, + layerIndex, + location.FileLine())); } } @@ -111,13 +100,9 @@ void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr, { if (rawPtr == nullptr) { - throw ParseException( - boost::str( - boost::format("%1% was called with a null tensor pointer. " - "at %2%") % - location.m_Function % - location.FileLine())); - + throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}", + location.m_Function, + location.FileLine())); } } @@ -126,9 +111,9 @@ void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr, { if (rawPtr == nullptr) { - throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") % - location.m_Function % - location.FileLine())); + throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}", + location.m_Function, + location.FileLine())); } } @@ -138,9 +123,9 @@ void CheckConstTensorSize(const unsigned int constTensorSize, { if (constTensorSize != tensorSize) { - throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") % - location.m_Function % - location.FileLine())); + throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}", + location.m_Function, + location.FileLine())); } } @@ -368,9 +353,7 @@ Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPt return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base(); case Layer::Layer_NONE: default: - throw ParseException(boost::str( - boost::format("Layer type %1% not recognized") % - layerType)); + throw ParseException(fmt::format("Layer type {} not recognized", layerType)); } } @@ -539,12 +522,10 @@ armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr) default: { CheckLocation location = CHECK_LOCATION(); - throw ParseException( - boost::str( - boost::format("Unsupported data type %1% = %2%. %3%") % - tensorPtr->dataType() % - EnumNameDataType(tensorPtr->dataType()) % - location.AsString())); + throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}", + tensorPtr->dataType(), + EnumNameDataType(tensorPtr->dataType()), + location.AsString())); } } @@ -624,11 +605,10 @@ armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr) default: { CheckLocation location = CHECK_LOCATION(); - throw ParseException( - boost::str(boost::format("Unsupported data type %1% = %2%. %3%") % - constTensorPtr->data_type() % - EnumNameConstTensorData(constTensorPtr->data_type()) % - location.AsString())); + throw ParseException(fmt::format("Unsupported data type {0} = {1}. 
{2}", + constTensorPtr->data_type(), + EnumNameConstTensorData(constTensorPtr->data_type()), + location.AsString())); } } } @@ -671,14 +651,11 @@ void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex { CHECK_LAYERS(graph, 0, layerIndex); const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str(); - throw ParseException( - boost::str( - boost::format("Layer not supported. " - "layerIndex: %1% " - "layerName: %2% / %3%") % - layerIndex % - layerName % - CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("Layer not supported. layerIndex: {0} " + "layerName: {1} / {2}", + layerIndex, + layerName, + CHECK_LOCATION().AsString())); } void Deserializer::ResetParser() @@ -722,17 +699,16 @@ Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryCo { if (binaryContent == nullptr) { - throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") % - CHECK_LOCATION().AsString())); + throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}", + CHECK_LOCATION().AsString())); } flatbuffers::Verifier verifier(binaryContent, len); if (verifier.VerifyBuffer() == false) { - throw ParseException( - boost::str(boost::format("Buffer doesn't conform to the expected Armnn " - "flatbuffers format. size:%1% %2%") % - len % - CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn " + "flatbuffers format. size:{0} {1}", + len, + CHECK_LOCATION().AsString())); } return GetSerializedGraph(binaryContent); } @@ -789,11 +765,9 @@ BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerInde return inputBinding.second; } } - throw ParseException( - boost::str( - boost::format("No input binding found for layer:%1% / %2%") % - name % - CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("No input binding found for layer:{0} / {1}", + name, + CHECK_LOCATION().AsString())); } BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex, @@ -807,11 +781,9 @@ BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerInd return outputBinding.second; } } - throw ParseException( - boost::str( - boost::format("No output binding found for layer:%1% / %2%") % - name % - CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("No output binding found for layer:{0} / {1}", + name, + CHECK_LOCATION().AsString())); } unsigned int Deserializer::GetInputLayerInVector(GraphPtr graph, int targetId) @@ -963,13 +935,12 @@ void Deserializer::RegisterOutputSlots(GraphPtr graph, LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex); if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots()) { - throw ParseException( - boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)" - " for layer index: %3% %4%") % - baseLayer->outputSlots()->size() % - layer->GetNumOutputSlots() % - layerIndex % - CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})" + " for layer index: {2} {3}", + baseLayer->outputSlots()->size(), + layer->GetNumOutputSlots(), + layerIndex, + CHECK_LOCATION().AsString())); } for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i) @@ -990,13 +961,12 @@ void Deserializer::RegisterInputSlots(GraphPtr graph, LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex); if (baseLayer->inputSlots()->size() != 
layer->GetNumInputSlots()) { - throw ParseException( - boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)" - " for layer index:%3% %4%") % - baseLayer->inputSlots()->size() % - layer->GetNumInputSlots() % - layerIndex % - CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})" + " for layer index:{2} {3}", + baseLayer->inputSlots()->size(), + layer->GetNumInputSlots(), + layerIndex, + CHECK_LOCATION().AsString())); } for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i) @@ -1157,8 +1127,7 @@ void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex) if (flatBufferCrops->Length() % 2 != 0) { - throw ParseException(boost::str( - boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString())); } std::vector> crops; @@ -1814,8 +1783,8 @@ void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex) if (flatBufferPadList->Length() % 2 != 0) { - throw ParseException(boost::str( - boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}", + CHECK_LOCATION().AsString())); } std::vector> padList; @@ -2001,8 +1970,8 @@ armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& in { if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end()) { - throw ParseException(boost::str( - boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("At most one component of shape can be -1 {}", + CHECK_LOCATION().AsString())); } auto targetNumElements = @@ -2183,8 +2152,8 @@ void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex) if (flatBufferPadList->Length() % 2 != 0) { - throw ParseException(boost::str( - boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}", + CHECK_LOCATION().AsString())); } std::vector> padList; @@ -2367,8 +2336,8 @@ void Deserializer::ParseSlice(GraphPtr graph, unsigned int layerIndex) if (fbBegin->Length() != fbSize->Length()) { - throw ParseException(boost::str( - boost::format("Begin and size descriptors must have the same length %1%") % CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("Begin and size descriptors must have the same length {}", + CHECK_LOCATION().AsString())); } armnn::SliceDescriptor descriptor; @@ -2404,8 +2373,8 @@ void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex) if (!(flatBufferBegin->Length() == flatBufferEnd->Length() && flatBufferBegin->Length() == flatBufferStride->Length())) { - throw ParseException(boost::str( - boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString())); + throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}", + CHECK_LOCATION().AsString())); } std::vector begin(flatBufferBegin->begin(), flatBufferBegin->end()); diff --git a/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp b/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp index bb38d5f4b4..cea6a43454 100644 --- 
a/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp +++ b/src/armnnDeserializer/test/ParserFlatbuffersSerializeFixture.hpp @@ -18,7 +18,7 @@ #include #include -#include +#include using armnnDeserializer::IDeserializer; @@ -68,14 +68,12 @@ struct ParserFlatbuffersSerializeFixture if (ret != armnn::Status::Success) { - throw armnn::Exception( - boost::str( - boost::format("The runtime failed to load the network. " - "Error was: %1%. in %2% [%3%:%4%]") % - errorMessage % - __func__ % - __FILE__ % - __LINE__)); + throw armnn::Exception(fmt::format("The runtime failed to load the network. " + "Error was: {0}. in {1} [{2}:{3}]", + errorMessage, + __func__, + __FILE__, + __LINE__)); } } diff --git a/src/armnnUtils/ParserHelper.cpp b/src/armnnUtils/ParserHelper.cpp index 9709773014..af8014d112 100644 --- a/src/armnnUtils/ParserHelper.cpp +++ b/src/armnnUtils/ParserHelper.cpp @@ -8,7 +8,7 @@ #include #include -#include +#include namespace armnnUtils { @@ -27,14 +27,12 @@ void ProcessConcatInputTensorInfo(armnn::TensorInfo& inputTensorInfo, // double check dimensions of the tensors if (inputTensorInfo.GetNumDimensions() != inputRank) { - throw armnn::ParseException( - boost::str( - boost::format( - "The number of dimensions: %1% for input tensors of the " - "concatenation op should be %2% %3%") - % inputTensorInfo.GetNumDimensions() - % inputRank - % CHECK_LOCATION().AsString())); + throw armnn::ParseException(fmt::format( + "The number of dimensions: {0} for input tensors of the " + "concatenation op should be {1} {2}", + inputTensorInfo.GetNumDimensions(), + inputRank, + CHECK_LOCATION().AsString())); } for (unsigned int j = 0; j < concatAxis; ++j) diff --git a/src/armnnUtils/ParserPrototxtFixture.hpp b/src/armnnUtils/ParserPrototxtFixture.hpp index 8356117e1f..cf28fcf513 100644 --- a/src/armnnUtils/ParserPrototxtFixture.hpp +++ b/src/armnnUtils/ParserPrototxtFixture.hpp @@ -6,13 +6,13 @@ #pragma once #include - #include #include #include #include +#include #include #include @@ -129,10 +129,9 @@ void ParserPrototxtFixture::Setup(const std::mapLoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage); if (ret != armnn::Status::Success) { - throw armnn::Exception(boost::str( - boost::format("LoadNetwork failed with error: '%1%' %2%") - % errorMessage - % CHECK_LOCATION().AsString())); + throw armnn::Exception(fmt::format("LoadNetwork failed with error: '{0}' {1}", + errorMessage, + CHECK_LOCATION().AsString())); } } @@ -147,10 +146,9 @@ void ParserPrototxtFixture::Setup() armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage); if (ret != armnn::Status::Success) { - throw armnn::Exception(boost::str( - boost::format("LoadNetwork failed with error: '%1%' %2%") - % errorMessage - % CHECK_LOCATION().AsString())); + throw armnn::Exception(fmt::format("LoadNetwork failed with error: '{0}' {1}", + errorMessage, + CHECK_LOCATION().AsString())); } } @@ -214,13 +212,12 @@ void ParserPrototxtFixture::RunTest(const std::mapGetNetworkOutputBindingInfo(it.first); if (bindingInfo.second.GetNumElements() != it.second.size()) { - throw armnn::Exception( - boost::str(boost::format("Output tensor %1% is expected to have %2% elements. " - "%3% elements supplied. %4%") % - it.first % - bindingInfo.second.GetNumElements() % - it.second.size() % - CHECK_LOCATION().AsString())); + throw armnn::Exception(fmt::format("Output tensor {0} is expected to have {1} elements. " + "{2} elements supplied. 
{3}", + it.first, + bindingInfo.second.GetNumElements(), + it.second.size(), + CHECK_LOCATION().AsString())); } // If the expected output shape is set, the output tensor checks will be carried out. @@ -234,25 +231,25 @@ void ParserPrototxtFixture::RunTest(const std::map -#include +#include #include namespace armnnUtils @@ -22,10 +22,10 @@ inline armnn::InputTensors MakeInputTensors(const std::vector #include -#include +#include using namespace armnn; @@ -88,11 +88,10 @@ TensorShape ExpandDims(const TensorShape& tensorShape, int axis) if (axis < -armnn::numeric_cast(outputDim) || axis > armnn::numeric_cast(tensorShape.GetNumDimensions())) { - throw InvalidArgumentException( - boost::str(boost::format("Invalid expansion axis %1% for %2%D input tensor. %3%") % - axis % - tensorShape.GetNumDimensions() % - CHECK_LOCATION().AsString())); + throw InvalidArgumentException(fmt::format("Invalid expansion axis {} for {}D input tensor. {}", + axis, + tensorShape.GetNumDimensions(), + CHECK_LOCATION().AsString())); } if (axis < 0) @@ -101,6 +100,7 @@ TensorShape ExpandDims(const TensorShape& tensorShape, int axis) } std::vector outputShape; + outputShape.reserve(tensorShape.GetNumDimensions()); for (unsigned int i = 0; i < tensorShape.GetNumDimensions(); ++i) { outputShape.push_back(tensorShape[i]); diff --git a/src/armnnUtils/VerificationHelpers.cpp b/src/armnnUtils/VerificationHelpers.cpp index 243d22e444..a4db97adf4 100644 --- a/src/armnnUtils/VerificationHelpers.cpp +++ b/src/armnnUtils/VerificationHelpers.cpp @@ -4,9 +4,10 @@ // #include "VerificationHelpers.hpp" -#include #include +#include + using namespace armnn; namespace armnnUtils @@ -23,13 +24,11 @@ void CheckValidSize(std::initializer_list validInputCounts, [&actualValue](size_t x) { return x == actualValue; } ); if (!isValid) { - throw ParseException( - boost::str( - boost::format("%1% = %2% is not valid, not in {%3%}. %4%") % - actualExpr % - actualValue % - validExpr % - location.AsString())); + throw ParseException(fmt::format("{} = {} is not valid, not in {{}}. {}", + actualExpr, + actualValue, + validExpr, + location.AsString())); } } @@ -39,12 +38,10 @@ uint32_t NonNegative(const char* expr, { if (value < 0) { - throw ParseException( - boost::str( - boost::format("'%1%' must be non-negative, received: %2% at %3%") % - expr % - value % - location.AsString() )); + throw ParseException(fmt::format("'{}' must be non-negative, received: {} at {}", + expr, + value, + location.AsString())); } else { @@ -58,12 +55,11 @@ int32_t VerifyInt32(const char* expr, { if (value < std::numeric_limits::min() || value > std::numeric_limits::max()) { - throw ParseException( - boost::str( - boost::format("'%1%' must should fit into a int32 (ArmNN don't support int64), received: %2% at %3%") % - expr % - value % - location.AsString() )); + throw ParseException(fmt::format("'{}' must should fit into a int32 (ArmNN don't support int64)," + " received: {} at {}", + expr, + value, + location.AsString())); } else { -- cgit v1.2.1