ArmNN 20.08 — annotated source listing of InferenceTest.inl (Doxygen "source" page; hyperlinked lines were lost in extraction, so some signature lines are missing below).
1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 #include "InferenceTest.hpp"
6 
8 #include <boost/numeric/conversion/cast.hpp>
9 #include <boost/format.hpp>
10 #include <boost/program_options.hpp>
11 
12 #include <fstream>
13 #include <iostream>
14 #include <iomanip>
15 #include <array>
16 #include <chrono>
17 
18 using namespace std;
19 using namespace std::chrono;
20 using namespace armnn::test;
21 
22 namespace armnn
23 {
24 namespace test
25 {
26 
27 using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
28 
29 template <typename TTestCaseDatabase, typename TModel>
31  int& numInferencesRef,
32  int& numCorrectInferencesRef,
33  const std::vector<unsigned int>& validationPredictions,
34  std::vector<unsigned int>* validationPredictionsOut,
35  TModel& model,
36  unsigned int testCaseId,
37  unsigned int label,
38  std::vector<typename TModel::DataType> modelInput)
39  : InferenceModelTestCase<TModel>(
40  model, testCaseId, std::vector<TContainer>{ modelInput }, { model.GetOutputSize() })
41  , m_Label(label)
42  , m_QuantizationParams(model.GetQuantizationParams())
43  , m_NumInferencesRef(numInferencesRef)
44  , m_NumCorrectInferencesRef(numCorrectInferencesRef)
45  , m_ValidationPredictions(validationPredictions)
46  , m_ValidationPredictionsOut(validationPredictionsOut)
47 {
48 }
49 
50 struct ClassifierResultProcessor : public boost::static_visitor<>
51 {
52  using ResultMap = std::map<float,int>;
53 
54  ClassifierResultProcessor(float scale, int offset)
55  : m_Scale(scale)
56  , m_Offset(offset)
57  {}
58 
59  void operator()(const std::vector<float>& values)
60  {
61  SortPredictions(values, [](float value)
62  {
63  return value;
64  });
65  }
66 
67  void operator()(const std::vector<uint8_t>& values)
68  {
69  auto& scale = m_Scale;
70  auto& offset = m_Offset;
71  SortPredictions(values, [&scale, &offset](uint8_t value)
72  {
73  return armnn::Dequantize(value, scale, offset);
74  });
75  }
76 
77  void operator()(const std::vector<int>& values)
78  {
79  IgnoreUnused(values);
80  ARMNN_ASSERT_MSG(false, "Non-float predictions output not supported.");
81  }
82 
83  ResultMap& GetResultMap() { return m_ResultMap; }
84 
85 private:
86  template<typename Container, typename Delegate>
87  void SortPredictions(const Container& c, Delegate delegate)
88  {
89  int index = 0;
90  for (const auto& value : c)
91  {
92  int classification = index++;
93  // Take the first class with each probability
94  // This avoids strange results when looping over batched results produced
95  // with identical test data.
96  ResultMap::iterator lb = m_ResultMap.lower_bound(value);
97 
98  if (lb == m_ResultMap.end() || !m_ResultMap.key_comp()(value, lb->first))
99  {
100  // If the key is not already in the map, insert it.
101  m_ResultMap.insert(lb, ResultMap::value_type(delegate(value), classification));
102  }
103  }
104  }
105 
106  ResultMap m_ResultMap;
107 
108  float m_Scale=0.0f;
109  int m_Offset=0;
110 };
111 
112 template <typename TTestCaseDatabase, typename TModel>
// NOTE(review): the extraction dropped the hyperlinked signature line here; per
// the tooltip at the bottom of this page it is:
//   TestCaseResult ClassifierTestCase<...>::ProcessResult(const InferenceTestOptions& params)
// Checks one inference result against the expected label (and optional
// validation file), logs the top predictions, and updates the shared counters.
114 {
115  auto& output = this->GetOutputs()[0];
116  const auto testCaseId = this->GetTestCaseId();
117 
// Rank the output values (dequantizing uint8 outputs via the stored params).
118  ClassifierResultProcessor resultProcessor(m_QuantizationParams.first, m_QuantizationParams.second);
119  boost::apply_visitor(resultProcessor, output);
120 
// Log up to 5 ranked predictions; the result map is ordered by value, so
// rbegin() walks from the highest value downwards.
121  ARMNN_LOG(info) << "= Prediction values for test #" << testCaseId;
122  auto it = resultProcessor.GetResultMap().rbegin();
123  for (int i=0; i<5 && it != resultProcessor.GetResultMap().rend(); ++i)
124  {
125  ARMNN_LOG(info) << "Top(" << (i+1) << ") prediction is " << it->second <<
126  " with value: " << (it->first);
127  ++it;
128  }
129 
// The actual classification is the argmax over the raw output container
// (independent of the logging above).
130  unsigned int prediction = 0;
131  boost::apply_visitor([&](auto&& value)
132  {
133  prediction = boost::numeric_cast<unsigned int>(
134  std::distance(value.begin(), std::max_element(value.begin(), value.end())));
135  },
136  output);
137 
138  // If we're just running the defaultTestCaseIds, each one must be classified correctly.
139  if (params.m_IterationCount == 0 && prediction != m_Label)
140  {
141  ARMNN_LOG(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
142  " is incorrect (should be " << m_Label << ")";
143  return TestCaseResult::Failed;
144  }
145 
146  // If a validation file was provided as input, it checks that the prediction matches.
147  if (!m_ValidationPredictions.empty() && prediction != m_ValidationPredictions[testCaseId])
148  {
149  ARMNN_LOG(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
150  " doesn't match the prediction in the validation file (" << m_ValidationPredictions[testCaseId] << ")";
151  return TestCaseResult::Failed;
152  }
153 
154  // If a validation file was requested as output, it stores the predictions.
155  if (m_ValidationPredictionsOut)
156  {
157  m_ValidationPredictionsOut->push_back(prediction);
158  }
159 
// Counters are references shared with the provider, accumulating across cases.
160  // Updates accuracy stats.
161  m_NumInferencesRef++;
162  if (prediction == m_Label)
163  {
164  m_NumCorrectInferencesRef++;
165  }
166 
167  return TestCaseResult::Ok;
168 }
169 
170 template <typename TDatabase, typename InferenceModel>
171 template <typename TConstructDatabaseCallable, typename TConstructModelCallable>
173  TConstructDatabaseCallable constructDatabase, TConstructModelCallable constructModel)
174  : m_ConstructModel(constructModel)
175  , m_ConstructDatabase(constructDatabase)
176  , m_NumInferences(0)
177  , m_NumCorrectInferences(0)
178 {
179 }
180 
181 template <typename TDatabase, typename InferenceModel>
183  boost::program_options::options_description& options)
184 {
185  namespace po = boost::program_options;
186 
187  options.add_options()
188  ("validation-file-in", po::value<std::string>(&m_ValidationFileIn)->default_value(""),
189  "Reads expected predictions from the given file and confirms they match the actual predictions.")
190  ("validation-file-out", po::value<std::string>(&m_ValidationFileOut)->default_value(""),
191  "Predictions are saved to the given file for later use via --validation-file-in.")
192  ("data-dir,d", po::value<std::string>(&m_DataDir)->required(),
193  "Path to directory containing test data");
194 
195  InferenceModel::AddCommandLineOptions(options, m_ModelCommandLineOptions);
196 }
197 
198 template <typename TDatabase, typename InferenceModel>
200  const InferenceTestOptions& commonOptions)
201 {
202  if (!ValidateDirectory(m_DataDir))
203  {
204  return false;
205  }
206 
207  ReadPredictions();
208 
209  m_Model = m_ConstructModel(commonOptions, m_ModelCommandLineOptions);
210  if (!m_Model)
211  {
212  return false;
213  }
214 
215  m_Database = std::make_unique<TDatabase>(m_ConstructDatabase(m_DataDir.c_str(), *m_Model));
216  if (!m_Database)
217  {
218  return false;
219  }
220 
221  return true;
222 }
223 
224 template <typename TDatabase, typename InferenceModel>
225 std::unique_ptr<IInferenceTestCase>
227 {
228  std::unique_ptr<typename TDatabase::TTestCaseData> testCaseData = m_Database->GetTestCaseData(testCaseId);
229  if (testCaseData == nullptr)
230  {
231  return nullptr;
232  }
233 
234  return std::make_unique<ClassifierTestCase<TDatabase, InferenceModel>>(
235  m_NumInferences,
236  m_NumCorrectInferences,
237  m_ValidationPredictions,
238  m_ValidationFileOut.empty() ? nullptr : &m_ValidationPredictionsOut,
239  *m_Model,
240  testCaseId,
241  testCaseData->m_Label,
242  std::move(testCaseData->m_InputImage));
243 }
244 
245 template <typename TDatabase, typename InferenceModel>
247 {
248  const double accuracy = boost::numeric_cast<double>(m_NumCorrectInferences) /
249  boost::numeric_cast<double>(m_NumInferences);
250  ARMNN_LOG(info) << std::fixed << std::setprecision(3) << "Overall accuracy: " << accuracy;
251 
252  // If a validation file was requested as output, the predictions are saved to it.
253  if (!m_ValidationFileOut.empty())
254  {
255  std::ofstream validationFileOut(m_ValidationFileOut.c_str(), std::ios_base::trunc | std::ios_base::out);
256  if (validationFileOut.good())
257  {
258  for (const unsigned int prediction : m_ValidationPredictionsOut)
259  {
260  validationFileOut << prediction << std::endl;
261  }
262  }
263  else
264  {
265  ARMNN_LOG(error) << "Failed to open output validation file: " << m_ValidationFileOut;
266  return false;
267  }
268  }
269 
270  return true;
271 }
272 
273 template <typename TDatabase, typename InferenceModel>
// NOTE(review): the extraction dropped the hyperlinked signature line here —
// presumably void ClassifierTestCaseProvider<...>::ReadPredictions().
// Loads expected per-test-case predictions from m_ValidationFileIn, if given;
// throws armnn::Exception when the file cannot be opened.
275 {
276  // Reads the expected predictions from the input validation file (if provided).
277  if (!m_ValidationFileIn.empty())
278  {
279  std::ifstream validationFileIn(m_ValidationFileIn.c_str(), std::ios_base::in);
280  if (validationFileIn.good())
281  {
// NOTE(review): `while (!eof())` appends one spurious element after the last
// successful read (a failed extraction zeroes `i` since C++11). The idiomatic
// form is `while (validationFileIn >> i)` — confirm before changing behavior.
282  while (!validationFileIn.eof())
283  {
284  unsigned int i;
285  validationFileIn >> i;
286  m_ValidationPredictions.emplace_back(i);
287  }
288  }
289  else
290  {
// Fail loudly: the user explicitly asked for validation, so a missing file is fatal.
291  throw armnn::Exception(boost::str(boost::format("Failed to open input validation file: %1%")
292  % m_ValidationFileIn));
293  }
294  }
295 }
296 
297 template<typename TConstructTestCaseProvider>
298 int InferenceTestMain(int argc,
299  char* argv[],
300  const std::vector<unsigned int>& defaultTestCaseIds,
301  TConstructTestCaseProvider constructTestCaseProvider)
302 {
303  // Configures logging for both the ARMNN library and this test program.
304 #ifdef NDEBUG
306 #else
308 #endif
309  armnn::ConfigureLogging(true, true, level);
310 
311  try
312  {
313  std::unique_ptr<IInferenceTestCaseProvider> testCaseProvider = constructTestCaseProvider();
314  if (!testCaseProvider)
315  {
316  return 1;
317  }
318 
319  InferenceTestOptions inferenceTestOptions;
320  if (!ParseCommandLine(argc, argv, *testCaseProvider, inferenceTestOptions))
321  {
322  return 1;
323  }
324 
325  const bool success = InferenceTest(inferenceTestOptions, defaultTestCaseIds, *testCaseProvider);
326  return success ? 0 : 1;
327  }
328  catch (armnn::Exception const& e)
329  {
330  ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
331  return 1;
332  }
333 }
334 
335 //
336 // This function allows us to create a classifier inference test based on:
337 // - a model file name
338 // - which can be a binary or a text file for protobuf formats
339 // - an input tensor name
340 // - an output tensor name
341 // - a set of test case ids
342 // - a callback method which creates an object that can return images
343 // called 'Database' in these tests
344 // - and an input tensor shape
345 //
346 template<typename TDatabase,
347  typename TParser,
348  typename TConstructDatabaseCallable>
350  char* argv[],
351  const char* modelFilename,
352  bool isModelBinary,
353  const char* inputBindingName,
354  const char* outputBindingName,
355  const std::vector<unsigned int>& defaultTestCaseIds,
356  TConstructDatabaseCallable constructDatabase,
357  const armnn::TensorShape* inputTensorShape)
358 
359 {
360  ARMNN_ASSERT(modelFilename);
361  ARMNN_ASSERT(inputBindingName);
362  ARMNN_ASSERT(outputBindingName);
363 
364  return InferenceTestMain(argc, argv, defaultTestCaseIds,
365  [=]
366  ()
367  {
370 
371  return make_unique<TestCaseProvider>(constructDatabase,
372  [&]
373  (const InferenceTestOptions &commonOptions,
374  typename InferenceModel::CommandLineOptions modelOptions)
375  {
376  if (!ValidateDirectory(modelOptions.m_ModelDir))
377  {
378  return std::unique_ptr<InferenceModel>();
379  }
380 
381  typename InferenceModel::Params modelParams;
382  modelParams.m_ModelPath = modelOptions.m_ModelDir + modelFilename;
383  modelParams.m_InputBindings = { inputBindingName };
384  modelParams.m_OutputBindings = { outputBindingName };
385 
386  if (inputTensorShape)
387  {
388  modelParams.m_InputShapes.push_back(*inputTensorShape);
389  }
390 
391  modelParams.m_IsModelBinary = isModelBinary;
392  modelParams.m_ComputeDevices = modelOptions.GetComputeDevicesAsBackendIds();
393  modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
394  modelParams.m_EnableFp16TurboMode = modelOptions.m_EnableFp16TurboMode;
395 
396  return std::make_unique<InferenceModel>(modelParams,
397  commonOptions.m_EnableProfiling,
398  commonOptions.m_DynamicBackendsPath);
399  });
400  });
401 }
402 
403 } // namespace test
404 } // namespace armnn
bool ParseCommandLine(int argc, char **argv, IInferenceTestCaseProvider &testCaseProvider, InferenceTestOptions &outParams)
Parse the command line of an ArmNN (or referencetests) inference test program.
void ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity)
Configures the logging behaviour of the ARMNN library.
Definition: Utils.cpp:10
const std::vector< TContainer > & GetOutputs() const
virtual const char * what() const noexcept override
Definition: Exceptions.cpp:32
#define ARMNN_LOG(severity)
Definition: Logging.hpp:163
ClassifierTestCaseProvider(TConstructDatabaseCallable constructDatabase, TConstructModelCallable constructModel)
Copyright (c) 2020 ARM Limited.
void IgnoreUnused(Ts &&...)
virtual bool ProcessCommandLineOptions(const InferenceTestOptions &commonOptions) override
virtual bool OnInferenceTestFinished() override
#define ARMNN_ASSERT_MSG(COND, MSG)
Definition: Assert.hpp:15
virtual TestCaseResult ProcessResult(const InferenceTestOptions &params) override
#define ARMNN_ASSERT(COND)
Definition: Assert.hpp:14
std::enable_if_t< std::is_unsigned< Source >::value && std::is_unsigned< Dest >::value, Dest > numeric_cast(Source source)
Definition: NumericCast.hpp:33
int ClassifierInferenceTestMain(int argc, char *argv[], const char *modelFilename, bool isModelBinary, const char *inputBindingName, const char *outputBindingName, const std::vector< unsigned int > &defaultTestCaseIds, TConstructDatabaseCallable constructDatabase, const armnn::TensorShape *inputTensorShape=nullptr)
bool InferenceTest(const InferenceTestOptions &params, const std::vector< unsigned int > &defaultTestCaseIds, IInferenceTestCaseProvider &testCaseProvider)
boost::variant< std::vector< float >, std::vector< int >, std::vector< unsigned char > > TContainer
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
virtual std::unique_ptr< IInferenceTestCase > GetTestCase(unsigned int testCaseId) override
virtual void AddCommandLineOptions(boost::program_options::options_description &options) override
static void AddCommandLineOptions(boost::program_options::options_description &desc, CommandLineOptions &options)
bool ValidateDirectory(std::string &dir)
boost::variant< std::vector< float >, std::vector< int >, std::vector< unsigned char > > TContainer
LogSeverity
Definition: Utils.hpp:12
int InferenceTestMain(int argc, char *argv[], const std::vector< unsigned int > &defaultTestCaseIds, TConstructTestCaseProvider constructTestCaseProvider)
The test completed without any errors.