// -------------------------------------------------------------------------
// NOTE(review): This listing is extraction residue from a documentation
// generator. The integer fused at the start of most lines is the ORIGINAL
// source line number, and many intermediate source lines are missing: the
// enclosing main() signature, the try blocks matching the catch clauses
// below, the desc.add_options() call that the (...) groups chain onto, and
// the declarations of dataDir / defaultBackends / runtime / network /
// model / checker / imageNameToLabel / inputTensorDataType, among others.
// The code is therefore documented in place, byte-identical, rather than
// rewritten. It appears to be the command-line driver of an ArmNN model
// accuracy tool: parse options, load and optimize a network, run inference
// over an ImageNet-style validation set, and report Top-N accuracy.
// -------------------------------------------------------------------------
// Shorthand alias for Boost.Program_options.
56 namespace po = boost::program_options;
// --- Storage for values parsed from the command line ---------------------
58 std::vector<armnn::BackendId> computeDevice;
60 std::string modelPath;
61 std::string modelFormat;
63 std::string inputName;
64 std::string inputLayout;
65 std::string outputName;
66 std::string modelOutputLabelsPath;
67 std::string validationLabelPath;
68 std::string validationRange;
69 std::string blacklistPath;
// Help text for the --compute option; the code that appends the list of
// valid backend IDs (from the backend registry, per the trailing tooltip
// text) is among the missing lines.
71 const std::string backendsMessage =
"Which device to run layers on by default. Possible choices: " 74 po::options_description desc(
"Options");
// --- Option registration. The desc.add_options() call itself is one of
// the missing lines; the (...) groups below are its chained arguments.
79 (
"help,h",
"Display help messages")
80 (
"model-path,m", po::value<std::string>(&modelPath)->required(),
"Path to armnn format model file")
81 (
"model-format,f", po::value<std::string>(&modelFormat)->required(),
82 "The model format. Supported values: caffe, tensorflow, tflite")
83 (
"input-name,i", po::value<std::string>(&inputName)->required(),
84 "Identifier of the input tensors in the network separated by comma.")
85 (
"output-name,o", po::value<std::string>(&outputName)->required(),
86 "Identifier of the output tensors in the network separated by comma.")
87 (
// dataDir is declared in one of the missing lines.
"data-dir,d", po::value<std::string>(&dataDir)->required(),
88 "Path to directory containing the ImageNet test data")
89 (
"model-output-labels,p", po::value<std::string>(&modelOutputLabelsPath)->required(),
90 "Path to model output labels file.")
91 (
"validation-labels-path,v", po::value<std::string>(&validationLabelPath)->required(),
92 "Path to ImageNet Validation Label file")
93 (
"data-layout,l", po::value<std::string>(&inputLayout)->default_value(
"NHWC"),
94 "Data layout. Supported value: NHWC, NCHW. Default: NHWC")
95 (
// defaultBackends is declared in a missing line (see trailing tooltip).
"compute,c", po::value<std::vector<armnn::BackendId>>(&computeDevice)->default_value(defaultBackends),
96 backendsMessage.c_str())
97 (
// "1:0" (begin > end) is a sentinel default — per the help text it means
// "evaluate all images"; presumably normalised after parsing (TODO confirm,
// the normalisation code is not in this extract).
"validation-range,r", po::value<std::string>(&validationRange)->default_value(
"1:0"),
98 "The range of the images to be evaluated. Specified in the form <begin index>:<end index>." 99 "The index starts at 1 and the range is inclusive." 100 "By default the evaluation will be performed on all images.")
101 (
"blacklist-path,b", po::value<std::string>(&blacklistPath)->default_value(
""),
102 "Path to a blacklist file where each line denotes the index of an image to be " 103 "excluded from evaluation.");
// Matching try block for this catch is among the missing lines. Option
// registration should not normally throw, hence "internal error".
105 catch (
const std::exception& e)
111 std::cerr <<
"Fatal internal error: " << e.what() << std::endl;
// --- Parse the command line ----------------------------------------------
115 po::variables_map vm;
118 po::store(po::parse_command_line(argc, argv, desc), vm);
120 if (vm.count(
"help"))
122 std::cout << desc << std::endl;
// Parse-failure path (catch is in missing lines): error then usage text.
129 std::cerr << e.what() << std::endl << std::endl;
130 std::cerr << desc << std::endl;
// --- Validate the requested compute backends ------------------------------
135 std::string invalidBackends;
138 ARMNN_LOG(fatal) <<
"The list of preferred devices contains invalid backend IDs: " 147 std::ifstream file(modelPath);
// --- Parse the model and optimize it for the chosen backends -------------
151 auto armnnparser(IParser::Create());
160 optimizedNet =
armnn::Optimize(*network, computeDevice, runtime->GetDeviceSpec());
// armnn::Exception from Optimize is reported via this message (the
// surrounding try/catch lines are missing from the extract).
164 std::stringstream message;
165 message <<
"armnn::Exception (" << e.
what() <<
") caught from optimize.";
// --- Load the optimized network into the runtime -------------------------
172 status = runtime->LoadNetwork(networkId, std::move(optimizedNet));
175 ARMNN_LOG(fatal) <<
"armnn::IRuntime: Failed to load network";
// --- Resolve input/output binding points by tensor name ------------------
183 inputBindingInfo = armnnparser->GetNetworkInputBindingInfo(0, inputName);
185 std::pair<armnn::LayerBindingId, armnn::TensorInfo>
187 std::vector<BindingPointInfo> inputBindings = { m_InputBindingInfo };
190 outputBindingInfo = armnnparser->GetNetworkOutputBindingInfo(0, outputName);
192 std::pair<armnn::LayerBindingId, armnn::TensorInfo>
194 std::vector<BindingPointInfo> outputBindings = { m_OutputBindingInfo };
// --- Validate auxiliary input files --------------------------------------
// Model output labels file must exist and be a regular file.
197 if (modelOutputLabelsPath.empty() || !fs::exists(modelOutputLabelsPath) ||
198 !fs::is_regular_file(modelOutputLabelsPath))
200 ARMNN_LOG(fatal) <<
"Invalid model output labels path at " << modelOutputLabelsPath;
202 const std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels =
// --- Parse the <begin>:<end> validation range ----------------------------
207 size_t imageBegIndex;
208 size_t imageEndIndex;
// imageIndexStrs presumably comes from splitting validationRange on ':'
// (the SplitBy call is in a missing line — see trailing tooltip text).
209 if (imageIndexStrs.size() != 2)
211 ARMNN_LOG(fatal) <<
"Invalid validation range specification: Invalid format " << validationRange;
// std::stoul throws std::invalid_argument / std::out_of_range on bad
// input; the catch below maps either case to a fatal log.
216 imageBegIndex = std::stoul(imageIndexStrs[0]);
217 imageEndIndex = std::stoul(imageIndexStrs[1]);
219 catch (
const std::exception& e)
221 ARMNN_LOG(fatal) <<
"Invalid validation range specification: " << validationRange;
// An empty blacklist path is allowed (it is the default); a non-empty one
// must point at an existing regular file.
226 if (!blacklistPath.empty() &&
227 !(fs::exists(blacklistPath) && fs::is_regular_file(blacklistPath)))
229 ARMNN_LOG(fatal) <<
"Invalid path to blacklist file at " << blacklistPath;
233 path pathToDataDir(dataDir);
235 validationLabelPath, pathToDataDir.string(), imageBegIndex, imageEndIndex, blacklistPath);
// TContainer holds one tensor's worth of data in whichever element type
// the network expects (float / int / uint8).
237 using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<uint8_t>>;
// --- Select data layout and derive tensor geometry -----------------------
255 if (inputLayout ==
"NCHW")
259 else if (inputLayout ==
"NHWC")
265 ARMNN_LOG(fatal) <<
"Invalid Data layout: " << inputLayout;
268 const unsigned int inputTensorWidth =
270 const unsigned int inputTensorHeight =
273 const unsigned int outputNumElements = model.GetOutputSize();
// Sanity check: exactly one label per output element.
275 if (modelOutputLabels.size() != outputNumElements)
277 ARMNN_LOG(fatal) <<
"Number of output elements: " << outputNumElements
278 <<
" , mismatches the number of output labels: " << modelOutputLabels.size();
// Images are evaluated one at a time.
282 const unsigned int batchSize = 1;
// --- Per-frontend (caffe / tensorflow / tflite) handling; bodies are in
// missing lines — presumably selecting normalization parameters.
285 if (modelFormat ==
"caffe")
289 else if (modelFormat ==
"tensorflow")
293 else if (modelFormat ==
"tflite")
299 ARMNN_LOG(fatal) <<
"Unsupported frontend: " << modelFormat;
// --- Main evaluation loop: one inference per validation image ------------
303 for (
const auto& imageEntry : imageNameToLabel)
305 const std::string imageName = imageEntry.first;
306 std::cout <<
"Processing image: " << imageName <<
"\n";
308 vector<TContainer> inputDataContainers;
309 vector<TContainer> outputDataContainers;
311 auto imagePath = pathToDataDir / fs::path(imageName);
// Prepare the input image and a matching output buffer in the element
// type the network expects; the three truncated arms below correspond to
// the int / uint8_t / float cases (case labels are in missing lines).
312 switch (inputTensorDataType)
315 inputDataContainers.push_back(
317 inputTensorWidth, inputTensorHeight,
320 inputTensorDataLayout));
321 outputDataContainers = { vector<int>(outputNumElements) };
324 inputDataContainers.push_back(
326 inputTensorWidth, inputTensorHeight,
329 inputTensorDataLayout));
330 outputDataContainers = { vector<uint8_t>(outputNumElements) };
334 inputDataContainers.push_back(
336 inputTensorWidth, inputTensorHeight,
339 inputTensorDataLayout));
340 outputDataContainers = { vector<float>(outputNumElements) };
// Run inference; a failed enqueue is logged as fatal for this image.
344 status = runtime->EnqueueWorkload(networkId,
350 ARMNN_LOG(fatal) <<
"armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
// Feed the raw network output to the accuracy checker.
353 checker.AddImageResult<
TContainer>(imageName, outputDataContainers);
// --- Report Top-1 .. Top-5 accuracy --------------------------------------
361 for(
unsigned int i = 1; i <= 5; ++i)
363 std::cout <<
"Top " << i <<
" Accuracy: " << checker.GetAccuracy(i) <<
"%" <<
"\n";
366 ARMNN_LOG(info) <<
"Accuracy Tool ran successfully!";
// --- Top-level error handling (matching try lines are missing) -----------
374 std::cerr <<
"Armnn Error: " << e.
what() << std::endl;
377 catch (
const std::exception& e)
380 std::cerr <<
"WARNING: ModelAccuracyTool-Armnn: An error has occurred when running the " 381 "Accuracy Tool: " << e.what() << std::endl;
static IRuntimePtr Create(const CreationOptions &options)
const TensorShape & GetShape() const
CPU Execution: Reference C++ kernels.
void ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity)
Configures the logging behaviour of the ARMNN library.
NormalizationParameters GetNormalizationParameters(const SupportedFrontend &modelFormat, const armnn::DataType &outputType)
Get normalization parameters.
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
virtual const char * what() const noexcept override
#define ARMNN_LOG(severity)
BackendRegistry & BackendRegistryInstance()
std::vector< uint8_t > PrepareImageTensor< uint8_t >(const std::string &imagePath, unsigned int newWidth, unsigned int newHeight, const NormalizationParameters &normParams, unsigned int batchSize, const armnn::DataLayout &outputLayout)
armnn::BindingPointInfo BindingPointInfo
std::string GetBackendIdsAsString() const
boost::variant< std::vector< float >, std::vector< int >, std::vector< unsigned char > > TContainer
std::vector< std::string > m_InputBindings
armnn::InputTensors MakeInputTensors(const std::vector< armnn::BindingPointInfo > &inputBindings, const std::vector< TContainer > &inputDataContainers)
std::vector< std::string > SplitBy(const std::string &originalString, const std::string &delimiter, bool includeEmptyToken)
Split a string into tokens by a delimiter.
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
#define ARMNN_ASSERT_MSG(COND, MSG)
std::vector< std::string > m_OutputBindings
std::vector< armnn::BackendId > m_ComputeDevices
DataType GetDataType() const
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
std::vector< int > PrepareImageTensor< int >(const std::string &imagePath, unsigned int newWidth, unsigned int newHeight, const NormalizationParameters &normParams, unsigned int batchSize, const armnn::DataLayout &outputLayout)
armnn::OutputTensors MakeOutputTensors(const std::vector< armnn::BindingPointInfo > &outputBindings, std::vector< TContainer > &outputDataContainers)
std::pair< armnn::LayerBindingId, armnn::TensorInfo > BindingPointInfo
std::vector< armnn::BackendId > defaultBackends
armnn::TensorInfo m_TensorInfo
Base class for all ArmNN exceptions so that users can filter to just those.
CPU Execution: NEON: ArmCompute.
bool ValidateDirectory(std::string &dir)
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
armnn::LayerBindingId m_BindingId
std::vector< float > PrepareImageTensor< float >(const std::string &imagePath, unsigned int newWidth, unsigned int newHeight, const NormalizationParameters &normParams, unsigned int batchSize, const armnn::DataLayout &outputLayout)