//
// ArmNN (NotReleased) — InferenceTest.inl
// (Doxygen page header: "Go to the documentation of this file.")
//
1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 #include "InferenceTest.hpp"
6 
7 #include <boost/algorithm/string.hpp>
8 #include <boost/numeric/conversion/cast.hpp>
9 #include <boost/filesystem/path.hpp>
10 #include <boost/assert.hpp>
11 #include <boost/format.hpp>
12 #include <boost/program_options.hpp>
13 #include <boost/filesystem/operations.hpp>
14 
15 #include <fstream>
16 #include <iostream>
17 #include <iomanip>
18 #include <array>
19 #include <chrono>
20 
21 using namespace std;
22 using namespace std::chrono;
23 using namespace armnn::test;
24 
25 namespace armnn
26 {
27 namespace test
28 {
29 
// Variant able to hold a model's tensor data in any of the element types
// this test harness supports (float32, int32, quantized uint8).
using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
31 
32 template <typename TTestCaseDatabase, typename TModel>
34  int& numInferencesRef,
35  int& numCorrectInferencesRef,
36  const std::vector<unsigned int>& validationPredictions,
37  std::vector<unsigned int>* validationPredictionsOut,
38  TModel& model,
39  unsigned int testCaseId,
40  unsigned int label,
41  std::vector<typename TModel::DataType> modelInput)
42  : InferenceModelTestCase<TModel>(
43  model, testCaseId, std::vector<TContainer>{ modelInput }, { model.GetOutputSize() })
44  , m_Label(label)
45  , m_QuantizationParams(model.GetQuantizationParams())
46  , m_NumInferencesRef(numInferencesRef)
47  , m_NumCorrectInferencesRef(numCorrectInferencesRef)
48  , m_ValidationPredictions(validationPredictions)
49  , m_ValidationPredictionsOut(validationPredictionsOut)
50 {
51 }
52 
53 struct ClassifierResultProcessor : public boost::static_visitor<>
54 {
55  using ResultMap = std::map<float,int>;
56 
57  ClassifierResultProcessor(float scale, int offset)
58  : m_Scale(scale)
59  , m_Offset(offset)
60  {}
61 
62  void operator()(const std::vector<float>& values)
63  {
64  SortPredictions(values, [](float value)
65  {
66  return value;
67  });
68  }
69 
70  void operator()(const std::vector<uint8_t>& values)
71  {
72  auto& scale = m_Scale;
73  auto& offset = m_Offset;
74  SortPredictions(values, [&scale, &offset](uint8_t value)
75  {
76  return armnn::Dequantize(value, scale, offset);
77  });
78  }
79 
80  void operator()(const std::vector<int>& values)
81  {
82  boost::ignore_unused(values);
83  BOOST_ASSERT_MSG(false, "Non-float predictions output not supported.");
84  }
85 
86  ResultMap& GetResultMap() { return m_ResultMap; }
87 
88 private:
89  template<typename Container, typename Delegate>
90  void SortPredictions(const Container& c, Delegate delegate)
91  {
92  int index = 0;
93  for (const auto& value : c)
94  {
95  int classification = index++;
96  // Take the first class with each probability
97  // This avoids strange results when looping over batched results produced
98  // with identical test data.
99  ResultMap::iterator lb = m_ResultMap.lower_bound(value);
100 
101  if (lb == m_ResultMap.end() || !m_ResultMap.key_comp()(value, lb->first))
102  {
103  // If the key is not already in the map, insert it.
104  m_ResultMap.insert(lb, ResultMap::value_type(delegate(value), classification));
105  }
106  }
107  }
108 
109  ResultMap m_ResultMap;
110 
111  float m_Scale=0.0f;
112  int m_Offset=0;
113 };
114 
115 template <typename TTestCaseDatabase, typename TModel>
117 {
118  auto& output = this->GetOutputs()[0];
119  const auto testCaseId = this->GetTestCaseId();
120 
121  ClassifierResultProcessor resultProcessor(m_QuantizationParams.first, m_QuantizationParams.second);
122  boost::apply_visitor(resultProcessor, output);
123 
124  ARMNN_LOG(info) << "= Prediction values for test #" << testCaseId;
125  auto it = resultProcessor.GetResultMap().rbegin();
126  for (int i=0; i<5 && it != resultProcessor.GetResultMap().rend(); ++i)
127  {
128  ARMNN_LOG(info) << "Top(" << (i+1) << ") prediction is " << it->second <<
129  " with value: " << (it->first);
130  ++it;
131  }
132 
133  unsigned int prediction = 0;
134  boost::apply_visitor([&](auto&& value)
135  {
136  prediction = boost::numeric_cast<unsigned int>(
137  std::distance(value.begin(), std::max_element(value.begin(), value.end())));
138  },
139  output);
140 
141  // If we're just running the defaultTestCaseIds, each one must be classified correctly.
142  if (params.m_IterationCount == 0 && prediction != m_Label)
143  {
144  ARMNN_LOG(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
145  " is incorrect (should be " << m_Label << ")";
146  return TestCaseResult::Failed;
147  }
148 
149  // If a validation file was provided as input, it checks that the prediction matches.
150  if (!m_ValidationPredictions.empty() && prediction != m_ValidationPredictions[testCaseId])
151  {
152  ARMNN_LOG(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
153  " doesn't match the prediction in the validation file (" << m_ValidationPredictions[testCaseId] << ")";
154  return TestCaseResult::Failed;
155  }
156 
157  // If a validation file was requested as output, it stores the predictions.
158  if (m_ValidationPredictionsOut)
159  {
160  m_ValidationPredictionsOut->push_back(prediction);
161  }
162 
163  // Updates accuracy stats.
164  m_NumInferencesRef++;
165  if (prediction == m_Label)
166  {
167  m_NumCorrectInferencesRef++;
168  }
169 
170  return TestCaseResult::Ok;
171 }
172 
173 template <typename TDatabase, typename InferenceModel>
174 template <typename TConstructDatabaseCallable, typename TConstructModelCallable>
176  TConstructDatabaseCallable constructDatabase, TConstructModelCallable constructModel)
177  : m_ConstructModel(constructModel)
178  , m_ConstructDatabase(constructDatabase)
179  , m_NumInferences(0)
180  , m_NumCorrectInferences(0)
181 {
182 }
183 
184 template <typename TDatabase, typename InferenceModel>
186  boost::program_options::options_description& options)
187 {
188  namespace po = boost::program_options;
189 
190  options.add_options()
191  ("validation-file-in", po::value<std::string>(&m_ValidationFileIn)->default_value(""),
192  "Reads expected predictions from the given file and confirms they match the actual predictions.")
193  ("validation-file-out", po::value<std::string>(&m_ValidationFileOut)->default_value(""),
194  "Predictions are saved to the given file for later use via --validation-file-in.")
195  ("data-dir,d", po::value<std::string>(&m_DataDir)->required(),
196  "Path to directory containing test data");
197 
198  InferenceModel::AddCommandLineOptions(options, m_ModelCommandLineOptions);
199 }
200 
201 template <typename TDatabase, typename InferenceModel>
203  const InferenceTestOptions& commonOptions)
204 {
205  if (!ValidateDirectory(m_DataDir))
206  {
207  return false;
208  }
209 
210  ReadPredictions();
211 
212  m_Model = m_ConstructModel(commonOptions, m_ModelCommandLineOptions);
213  if (!m_Model)
214  {
215  return false;
216  }
217 
218  m_Database = std::make_unique<TDatabase>(m_ConstructDatabase(m_DataDir.c_str(), *m_Model));
219  if (!m_Database)
220  {
221  return false;
222  }
223 
224  return true;
225 }
226 
227 template <typename TDatabase, typename InferenceModel>
228 std::unique_ptr<IInferenceTestCase>
230 {
231  std::unique_ptr<typename TDatabase::TTestCaseData> testCaseData = m_Database->GetTestCaseData(testCaseId);
232  if (testCaseData == nullptr)
233  {
234  return nullptr;
235  }
236 
237  return std::make_unique<ClassifierTestCase<TDatabase, InferenceModel>>(
238  m_NumInferences,
239  m_NumCorrectInferences,
240  m_ValidationPredictions,
241  m_ValidationFileOut.empty() ? nullptr : &m_ValidationPredictionsOut,
242  *m_Model,
243  testCaseId,
244  testCaseData->m_Label,
245  std::move(testCaseData->m_InputImage));
246 }
247 
248 template <typename TDatabase, typename InferenceModel>
250 {
251  const double accuracy = boost::numeric_cast<double>(m_NumCorrectInferences) /
252  boost::numeric_cast<double>(m_NumInferences);
253  ARMNN_LOG(info) << std::fixed << std::setprecision(3) << "Overall accuracy: " << accuracy;
254 
255  // If a validation file was requested as output, the predictions are saved to it.
256  if (!m_ValidationFileOut.empty())
257  {
258  std::ofstream validationFileOut(m_ValidationFileOut.c_str(), std::ios_base::trunc | std::ios_base::out);
259  if (validationFileOut.good())
260  {
261  for (const unsigned int prediction : m_ValidationPredictionsOut)
262  {
263  validationFileOut << prediction << std::endl;
264  }
265  }
266  else
267  {
268  ARMNN_LOG(error) << "Failed to open output validation file: " << m_ValidationFileOut;
269  return false;
270  }
271  }
272 
273  return true;
274 }
275 
276 template <typename TDatabase, typename InferenceModel>
278 {
279  // Reads the expected predictions from the input validation file (if provided).
280  if (!m_ValidationFileIn.empty())
281  {
282  std::ifstream validationFileIn(m_ValidationFileIn.c_str(), std::ios_base::in);
283  if (validationFileIn.good())
284  {
285  while (!validationFileIn.eof())
286  {
287  unsigned int i;
288  validationFileIn >> i;
289  m_ValidationPredictions.emplace_back(i);
290  }
291  }
292  else
293  {
294  throw armnn::Exception(boost::str(boost::format("Failed to open input validation file: %1%")
295  % m_ValidationFileIn));
296  }
297  }
298 }
299 
300 template<typename TConstructTestCaseProvider>
301 int InferenceTestMain(int argc,
302  char* argv[],
303  const std::vector<unsigned int>& defaultTestCaseIds,
304  TConstructTestCaseProvider constructTestCaseProvider)
305 {
306  // Configures logging for both the ARMNN library and this test program.
307 #ifdef NDEBUG
309 #else
311 #endif
312  armnn::ConfigureLogging(true, true, level);
313 
314  try
315  {
316  std::unique_ptr<IInferenceTestCaseProvider> testCaseProvider = constructTestCaseProvider();
317  if (!testCaseProvider)
318  {
319  return 1;
320  }
321 
322  InferenceTestOptions inferenceTestOptions;
323  if (!ParseCommandLine(argc, argv, *testCaseProvider, inferenceTestOptions))
324  {
325  return 1;
326  }
327 
328  const bool success = InferenceTest(inferenceTestOptions, defaultTestCaseIds, *testCaseProvider);
329  return success ? 0 : 1;
330  }
331  catch (armnn::Exception const& e)
332  {
333  ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
334  return 1;
335  }
336 }
337 
338 //
339 // This function allows us to create a classifier inference test based on:
340 // - a model file name
341 // - which can be a binary or a text file for protobuf formats
342 // - an input tensor name
343 // - an output tensor name
344 // - a set of test case ids
345 // - a callback method which creates an object that can return images
346 // called 'Database' in these tests
347 // - and an input tensor shape
348 //
349 template<typename TDatabase,
350  typename TParser,
351  typename TConstructDatabaseCallable>
353  char* argv[],
354  const char* modelFilename,
355  bool isModelBinary,
356  const char* inputBindingName,
357  const char* outputBindingName,
358  const std::vector<unsigned int>& defaultTestCaseIds,
359  TConstructDatabaseCallable constructDatabase,
360  const armnn::TensorShape* inputTensorShape)
361 
362 {
363  BOOST_ASSERT(modelFilename);
364  BOOST_ASSERT(inputBindingName);
365  BOOST_ASSERT(outputBindingName);
366 
367  return InferenceTestMain(argc, argv, defaultTestCaseIds,
368  [=]
369  ()
370  {
373 
374  return make_unique<TestCaseProvider>(constructDatabase,
375  [&]
376  (const InferenceTestOptions &commonOptions,
377  typename InferenceModel::CommandLineOptions modelOptions)
378  {
379  if (!ValidateDirectory(modelOptions.m_ModelDir))
380  {
381  return std::unique_ptr<InferenceModel>();
382  }
383 
384  typename InferenceModel::Params modelParams;
385  modelParams.m_ModelPath = modelOptions.m_ModelDir + modelFilename;
386  modelParams.m_InputBindings = { inputBindingName };
387  modelParams.m_OutputBindings = { outputBindingName };
388 
389  if (inputTensorShape)
390  {
391  modelParams.m_InputShapes.push_back(*inputTensorShape);
392  }
393 
394  modelParams.m_IsModelBinary = isModelBinary;
395  modelParams.m_ComputeDevices = modelOptions.GetComputeDevicesAsBackendIds();
396  modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
397  modelParams.m_EnableFp16TurboMode = modelOptions.m_EnableFp16TurboMode;
398 
399  return std::make_unique<InferenceModel>(modelParams,
400  commonOptions.m_EnableProfiling,
401  commonOptions.m_DynamicBackendsPath);
402  });
403  });
404 }
405 
406 } // namespace test
407 } // namespace armnn
/*
 * Doxygen cross-reference index captured with this listing (extraction residue,
 * not part of the original source file):
 *
 *   void ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity)   — Utils.cpp:10
 *   bool InferenceTest(const InferenceTestOptions& params, const std::vector<unsigned int>& defaultTestCaseIds, IInferenceTestCaseProvider& testCaseProvider)
 *   boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>> TContainer
 *   virtual bool ProcessCommandLineOptions(const InferenceTestOptions& commonOptions) override
 *   int ClassifierInferenceTestMain(int argc, char* argv[], const char* modelFilename, bool isModelBinary, const char* inputBindingName, const char* outputBindingName, const std::vector<unsigned int>& defaultTestCaseIds, TConstructDatabaseCallable constructDatabase, const armnn::TensorShape* inputTensorShape = nullptr)
 *   bool ParseCommandLine(int argc, char** argv, IInferenceTestCaseProvider& testCaseProvider, InferenceTestOptions& outParams)
 *   #define ARMNN_LOG(severity)                                                                        — Logging.hpp:163
 *   virtual std::unique_ptr<IInferenceTestCase> GetTestCase(unsigned int testCaseId) override
 *   virtual const char* what() const noexcept override                                                 — Exceptions.cpp:32
 *   TestCaseResult::Ok — the test completed without any errors.
 *   ClassifierTestCaseProvider(TConstructDatabaseCallable constructDatabase, TConstructModelCallable constructModel)
 *   LogSeverity                                                                                        — Utils.hpp:12
 *   virtual void AddCommandLineOptions(boost::program_options::options_description& options) override
 *   armnn::Exception — base class for all ArmNN exceptions so that users can filter to just those.     — Exceptions.hpp:46
 *   const std::vector<TContainer>& GetOutputs() const
 *   static void AddCommandLineOptions(boost::program_options::options_description& desc, CommandLineOptions& options)
 *   virtual bool OnInferenceTestFinished() override
 *   int InferenceTestMain(int argc, char* argv[], const std::vector<unsigned int>& defaultTestCaseIds, TConstructTestCaseProvider constructTestCaseProvider)
 *   armnn::Runtime::CreationOptions::ExternalProfilingOptions options
 *   virtual TestCaseResult ProcessResult(const InferenceTestOptions& params) override
 *   bool ValidateDirectory(std::string& dir)
 */