ArmNN 20.08
armnn::test Namespace Reference

Classes

class  ClassifierTestCase
 
class  ClassifierTestCaseProvider
 
class  IInferenceTestCase
 
class  IInferenceTestCaseProvider
 
class  InferenceModelTestCase
 
struct  InferenceTestOptions
 
class  TestFrameworkException
 

Typedefs

using TContainer = boost::variant< std::vector< float >, std::vector< int >, std::vector< unsigned char > >
 

Enumerations

enum  TestCaseResult { Ok, Failed, Abort }
 

Functions

bool ParseCommandLine (int argc, char **argv, IInferenceTestCaseProvider &testCaseProvider, InferenceTestOptions &outParams)
 Parse the command line of an ArmNN (or referencetests) inference test program.
 
bool ValidateDirectory (std::string &dir)
 
bool InferenceTest (const InferenceTestOptions &params, const std::vector< unsigned int > &defaultTestCaseIds, IInferenceTestCaseProvider &testCaseProvider)
 
template<typename TConstructTestCaseProvider >
int InferenceTestMain (int argc, char *argv[], const std::vector< unsigned int > &defaultTestCaseIds, TConstructTestCaseProvider constructTestCaseProvider)
 
template<typename TDatabase , typename TParser , typename TConstructDatabaseCallable >
int ClassifierInferenceTestMain (int argc, char *argv[], const char *modelFilename, bool isModelBinary, const char *inputBindingName, const char *outputBindingName, const std::vector< unsigned int > &defaultTestCaseIds, TConstructDatabaseCallable constructDatabase, const armnn::TensorShape *inputTensorShape=nullptr)
 

Typedef Documentation

◆ TContainer

using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char> >

Definition at line 27 of file InferenceTest.inl.
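
TContainer lets the test harness hold one tensor's worth of data without committing to an element type (float, int, or unsigned char for quantized data). The following is a minimal, self-contained sketch, not taken from the ArmNN sources, of filling such a variant and inspecting it with Boost.Variant's apply_visitor; the SizeVisitor helper and the element counts are illustrative only.

// Sketch only: shows how a TContainer can carry data of any of its three element types.
#include <boost/variant.hpp>
#include <cstddef>
#include <iostream>
#include <vector>

using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;

// Visitor that works for whichever alternative the variant currently holds.
struct SizeVisitor : boost::static_visitor<std::size_t>
{
    template <typename T>
    std::size_t operator()(const std::vector<T>& v) const { return v.size(); }
};

int main()
{
    TContainer floatOutput    = std::vector<float>(1001, 0.0f);            // e.g. classifier probabilities
    TContainer quantizedInput = std::vector<unsigned char>(224 * 224 * 3); // e.g. a quantized input image

    SizeVisitor sizeVisitor;
    std::cout << boost::apply_visitor(sizeVisitor, floatOutput)    << "\n"; // 1001
    std::cout << boost::apply_visitor(sizeVisitor, quantizedInput) << "\n"; // 150528
    return 0;
}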

Enumeration Type Documentation

◆ TestCaseResult

enum TestCaseResult [strong]
Enumerator

Ok      The test completed without any errors.
Failed  The test failed (e.g. the prediction didn't match the validation file). This will eventually fail the whole program, but the remaining test cases will still be run.
Abort   The test failed with a fatal error. The remaining tests will not be run.

Definition at line 70 of file InferenceTest.hpp.

enum class TestCaseResult
{
    /// The test completed without any errors.
    Ok,
    /// The test failed (e.g. the prediction didn't match the validation file).
    /// This will eventually fail the whole program but the remaining test cases will still be run.
    Failed,
    /// The test failed with a fatal error. The remaining tests will not be run.
    Abort
};
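To make the Failed/Abort distinction concrete, here is a minimal sketch, not from the ArmNN sources, of mapping a single test-case outcome onto TestCaseResult. ClassifyOutcome and its parameters are hypothetical; only the enumerator semantics come from the documentation above.

enum class TestCaseResult { Ok, Failed, Abort };

TestCaseResult ClassifyOutcome(bool inferenceRan, unsigned int predictedLabel, unsigned int expectedLabel)
{
    if (!inferenceRan)
    {
        return TestCaseResult::Abort;  // Fatal error: the remaining test cases will not be run.
    }
    if (predictedLabel != expectedLabel)
    {
        return TestCaseResult::Failed; // Keep running; the program will still fail at the end.
    }
    return TestCaseResult::Ok;
}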

Function Documentation

◆ ClassifierInferenceTestMain()

template<typename TDatabase, typename TParser, typename TConstructDatabaseCallable>
int ClassifierInferenceTestMain(int argc,
                                char* argv[],
                                const char* modelFilename,
                                bool isModelBinary,
                                const char* inputBindingName,
                                const char* outputBindingName,
                                const std::vector<unsigned int>& defaultTestCaseIds,
                                TConstructDatabaseCallable constructDatabase,
                                const armnn::TensorShape* inputTensorShape = nullptr)

Definition at line 349 of file InferenceTest.inl.

References ARMNN_ASSERT, InferenceTestMain(), Params::m_ModelPath, and ValidateDirectory().

Referenced by main().

{
    ARMNN_ASSERT(modelFilename);
    ARMNN_ASSERT(inputBindingName);
    ARMNN_ASSERT(outputBindingName);

    return InferenceTestMain(argc, argv, defaultTestCaseIds,
        [=]
        ()
        {
            // Aliases for the parser-specific model type and its classifier test-case provider.
            using InferenceModel = InferenceModel<TParser, typename TDatabase::DataType>;
            using TestCaseProvider = ClassifierTestCaseProvider<TDatabase, InferenceModel>;

            return make_unique<TestCaseProvider>(constructDatabase,
                [&]
                (const InferenceTestOptions& commonOptions,
                 typename InferenceModel::CommandLineOptions modelOptions)
                {
                    if (!ValidateDirectory(modelOptions.m_ModelDir))
                    {
                        return std::unique_ptr<InferenceModel>();
                    }

                    typename InferenceModel::Params modelParams;
                    modelParams.m_ModelPath = modelOptions.m_ModelDir + modelFilename;
                    modelParams.m_InputBindings  = { inputBindingName };
                    modelParams.m_OutputBindings = { outputBindingName };

                    if (inputTensorShape)
                    {
                        modelParams.m_InputShapes.push_back(*inputTensorShape);
                    }

                    modelParams.m_IsModelBinary = isModelBinary;
                    modelParams.m_ComputeDevices = modelOptions.GetComputeDevicesAsBackendIds();
                    modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
                    modelParams.m_EnableFp16TurboMode = modelOptions.m_EnableFp16TurboMode;

                    return std::make_unique<InferenceModel>(modelParams,
                                                            commonOptions.m_EnableProfiling,
                                                            commonOptions.m_DynamicBackendsPath);
                });
        });
}
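As the "Referenced by main()" note suggests, a classifier test program typically just forwards its main() to ClassifierInferenceTestMain(). The sketch below is hypothetical: MyDatabase, MyParser, the model file name, the binding names, the tensor shape and the database-construction lambda are placeholders whose exact signatures depend on the database type and on ClassifierTestCaseProvider; only the parameter order of ClassifierInferenceTestMain() itself comes from the documentation above.

// Hypothetical classifier test program built on ClassifierInferenceTestMain().
#include <memory>
#include <vector>

int main(int argc, char* argv[])
{
    // Shape of the single input tensor, if the model needs it spelled out explicitly.
    armnn::TensorShape inputTensorShape({ 1, 224, 224, 3 });

    return armnn::test::ClassifierInferenceTestMain<MyDatabase, MyParser>(
        argc, argv,
        "my_classifier.tflite",     // modelFilename, appended to the configured model directory
        true,                       // isModelBinary
        "input",                    // inputBindingName
        "output",                   // outputBindingName
        { 0, 1, 2, 3, 4 },          // defaultTestCaseIds, run when no iteration count is given
        [](const char* dataDir, const auto& /*model*/) { return MyDatabase(dataDir); },
        &inputTensorShape);
}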

◆ InferenceTest()

bool InferenceTest(const InferenceTestOptions& params,
                   const std::vector<unsigned int>& defaultTestCaseIds,
                   IInferenceTestCaseProvider& testCaseProvider)

Definition at line 119 of file InferenceTest.cpp.

References ARMNN_ASSERT_MSG, ARMNN_LOG, armnn::error, IInferenceTestCaseProvider::GetTestCase(), armnn::info, InferenceTestOptions::m_EnableProfiling, InferenceTestOptions::m_InferenceTimesFile, InferenceTestOptions::m_IterationCount, IInferenceTestCaseProvider::OnInferenceTestFinished(), armnn::warning, and Exception::what().

Referenced by InferenceTestMain().

{
#if !defined (NDEBUG)
    if (params.m_IterationCount > 0) // If just running a few select images then don't bother to warn.
    {
        ARMNN_LOG(warning) << "Performance test running in DEBUG build - results may be inaccurate.";
    }
#endif

    double totalTime = 0;
    unsigned int nbProcessed = 0;
    bool success = true;

    // Opens the file to write inference times to, if needed.
    ofstream inferenceTimesFile;
    const bool recordInferenceTimes = !params.m_InferenceTimesFile.empty();
    if (recordInferenceTimes)
    {
        inferenceTimesFile.open(params.m_InferenceTimesFile.c_str(), ios_base::trunc | ios_base::out);
        if (!inferenceTimesFile.good())
        {
            ARMNN_LOG(error) << "Failed to open inference times file for writing: "
                             << params.m_InferenceTimesFile;
            return false;
        }
    }

    // Create a profiler and register it for the current thread.
    std::unique_ptr<Profiler> profiler = std::make_unique<Profiler>();
    ProfilerManager::GetInstance().RegisterProfiler(profiler.get());

    // Enable profiling if requested.
    profiler->EnableProfiling(params.m_EnableProfiling);

    // Run a single test case to 'warm-up' the model. The first one can sometimes take up to 10x longer.
    std::unique_ptr<IInferenceTestCase> warmupTestCase = testCaseProvider.GetTestCase(0);
    if (warmupTestCase == nullptr)
    {
        ARMNN_LOG(error) << "Failed to load test case";
        return false;
    }

    try
    {
        warmupTestCase->Run();
    }
    catch (const TestFrameworkException& testError)
    {
        ARMNN_LOG(error) << testError.what();
        return false;
    }

    const unsigned int nbTotalToProcess = params.m_IterationCount > 0 ? params.m_IterationCount
        : static_cast<unsigned int>(defaultTestCaseIds.size());

    for (; nbProcessed < nbTotalToProcess; nbProcessed++)
    {
        const unsigned int testCaseId = params.m_IterationCount > 0 ? nbProcessed : defaultTestCaseIds[nbProcessed];
        std::unique_ptr<IInferenceTestCase> testCase = testCaseProvider.GetTestCase(testCaseId);

        if (testCase == nullptr)
        {
            ARMNN_LOG(error) << "Failed to load test case";
            return false;
        }

        time_point<high_resolution_clock> predictStart;
        time_point<high_resolution_clock> predictEnd;

        TestCaseResult result = TestCaseResult::Ok;

        try
        {
            predictStart = high_resolution_clock::now();

            testCase->Run();

            predictEnd = high_resolution_clock::now();

            // duration<double> will convert the time difference into seconds as a double by default.
            double timeTakenS = duration<double>(predictEnd - predictStart).count();
            totalTime += timeTakenS;

            // Outputs inference times, if needed.
            if (recordInferenceTimes)
            {
                inferenceTimesFile << testCaseId << " " << (timeTakenS * 1000.0) << std::endl;
            }

            result = testCase->ProcessResult(params);
        }
        catch (const TestFrameworkException& testError)
        {
            ARMNN_LOG(error) << testError.what();
            result = TestCaseResult::Abort;
        }

        switch (result)
        {
            case TestCaseResult::Ok:
                break;
            case TestCaseResult::Abort:
                return false;
            case TestCaseResult::Failed:
                // This test failed so we will fail the entire program eventually, but keep going for now.
                success = false;
                break;
            default:
                ARMNN_ASSERT_MSG(false, "Unexpected TestCaseResult");
                return false;
        }
    }

    const double averageTimePerTestCaseMs = totalTime / nbProcessed * 1000.0f;

    ARMNN_LOG(info) << std::fixed << std::setprecision(3) <<
        "Total time for " << nbProcessed << " test cases: " << totalTime << " seconds";
    ARMNN_LOG(info) << std::fixed << std::setprecision(3) <<
        "Average time per test case: " << averageTimePerTestCaseMs << " ms";

    // If profiling is enabled, print out the results.
    if (profiler && profiler->IsProfilingEnabled())
    {
        profiler->Print(std::cout);
    }

    if (!success)
    {
        ARMNN_LOG(error) << "One or more test cases failed";
        return false;
    }

    return testCaseProvider.OnInferenceTestFinished();
}
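The per-case timing above uses a standard std::chrono idiom: duration<double> converts the difference between two high_resolution_clock time points directly into seconds, which is then multiplied by 1000 before being written to the inference-times file. A self-contained illustration (the sleep simply stands in for testCase->Run()):

// Sketch only: demonstrates the timing idiom used by InferenceTest().
#include <chrono>
#include <iostream>
#include <thread>

int main()
{
    using namespace std::chrono;

    const time_point<high_resolution_clock> predictStart = high_resolution_clock::now();
    std::this_thread::sleep_for(milliseconds(50));        // pretend inference
    const time_point<high_resolution_clock> predictEnd = high_resolution_clock::now();

    // duration<double> yields seconds as a double; the inference-times file stores milliseconds.
    const double timeTakenS = duration<double>(predictEnd - predictStart).count();
    std::cout << "Inference took " << timeTakenS * 1000.0 << " ms\n";
    return 0;
}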

◆ InferenceTestMain()

template<typename TConstructTestCaseProvider>
int InferenceTestMain(int argc,
                      char* argv[],
                      const std::vector<unsigned int>& defaultTestCaseIds,
                      TConstructTestCaseProvider constructTestCaseProvider)

Definition at line 298 of file InferenceTest.inl.

References ARMNN_LOG, armnn::ConfigureLogging(), armnn::Debug, armnn::fatal, InferenceTest(), armnn::Info, ParseCommandLine(), and Exception::what().

Referenced by ClassifierInferenceTestMain(), and main().

{
    // Configures logging for both the ARMNN library and this test program.
#ifdef NDEBUG
    armnn::LogSeverity level = armnn::LogSeverity::Info;
#else
    armnn::LogSeverity level = armnn::LogSeverity::Debug;
#endif
    armnn::ConfigureLogging(true, true, level);

    try
    {
        std::unique_ptr<IInferenceTestCaseProvider> testCaseProvider = constructTestCaseProvider();
        if (!testCaseProvider)
        {
            return 1;
        }

        InferenceTestOptions inferenceTestOptions;
        if (!ParseCommandLine(argc, argv, *testCaseProvider, inferenceTestOptions))
        {
            return 1;
        }

        const bool success = InferenceTest(inferenceTestOptions, defaultTestCaseIds, *testCaseProvider);
        return success ? 0 : 1;
    }
    catch (armnn::Exception const& e)
    {
        ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
        return 1;
    }
}
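A non-classifier test program can call InferenceTestMain() directly, supplying a factory for its own test-case provider. The sketch below is hypothetical: MyTestCaseProvider stands for some concrete IInferenceTestCaseProvider implementation and is not part of the ArmNN sources; the exit-code convention (0 on success, 1 on failure) comes from the listing above.

// Hypothetical entry point built on InferenceTestMain().
#include <memory>
#include <vector>

int main(int argc, char* argv[])
{
    // The factory is invoked once; returning nullptr makes InferenceTestMain() return 1.
    return armnn::test::InferenceTestMain(argc, argv,
                                          { 0, 1, 2 },   // defaultTestCaseIds
                                          [] { return std::make_unique<MyTestCaseProvider>(); });
}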

◆ ParseCommandLine()

bool ParseCommandLine(int argc,
                      char** argv,
                      IInferenceTestCaseProvider& testCaseProvider,
                      InferenceTestOptions& outParams)

Parse the command line of an ArmNN (or referencetests) inference test program.

Returns: false if any error occurred during options processing, otherwise true.

Definition at line 30 of file InferenceTest.cpp.

References IInferenceTestCaseProvider::AddCommandLineOptions(), ARMNN_ASSERT_MSG, InferenceTestOptions::m_EnableProfiling, InferenceTestOptions::m_InferenceTimesFile, InferenceTestOptions::m_IterationCount, and IInferenceTestCaseProvider::ProcessCommandLineOptions().

Referenced by InferenceTestMain().

{
    namespace po = boost::program_options;

    po::options_description desc("Options");

    try
    {
        // Adds generic options needed for all inference tests.
        desc.add_options()
            ("help", "Display help messages")
            ("iterations,i", po::value<unsigned int>(&outParams.m_IterationCount)->default_value(0),
             "Sets the number of inferences to perform. If unset, a default number will be run.")
            ("inference-times-file", po::value<std::string>(&outParams.m_InferenceTimesFile)->default_value(""),
             "If non-empty, each individual inference time will be recorded and output to this file")
            ("event-based-profiling,e", po::value<bool>(&outParams.m_EnableProfiling)->default_value(0),
             "Enables built-in profiler. If unset, defaults to off.");

        // Adds options specific to the ITestCaseProvider.
        testCaseProvider.AddCommandLineOptions(desc);
    }
    catch (const std::exception& e)
    {
        // Coverity points out that default_value(...) can throw a bad_lexical_cast,
        // and that desc.add_options() can throw boost::io::too_few_args.
        // They really won't in any of these cases.
        ARMNN_ASSERT_MSG(false, "Caught unexpected exception");
        std::cerr << "Fatal internal error: " << e.what() << std::endl;
        return false;
    }

    po::variables_map vm;

    try
    {
        po::store(po::parse_command_line(argc, argv, desc), vm);

        if (vm.count("help"))
        {
            std::cout << desc << std::endl;
            return false;
        }

        po::notify(vm);
    }
    catch (po::error& e)
    {
        std::cerr << e.what() << std::endl << std::endl;
        std::cerr << desc << std::endl;
        return false;
    }

    if (!testCaseProvider.ProcessCommandLineOptions(outParams))
    {
        return false;
    }

    return true;
}
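The generic options registered above give every inference test program at least --help, --iterations/-i, --inference-times-file and --event-based-profiling/-e; the provider can add its own. The sketch below is hypothetical: it drives ParseCommandLine() with a synthetic command line, and MyTestCaseProvider is a placeholder for a concrete IInferenceTestCaseProvider.

// Sketch only: exercises ParseCommandLine() with a synthetic argv.
int main()
{
    MyTestCaseProvider provider;                 // placeholder provider; adds no extra options
    armnn::test::InferenceTestOptions options;

    const char* args[] = { "InferenceTest",
                           "--iterations", "1000",
                           "--inference-times-file", "times.txt",
                           "--event-based-profiling", "true" };
    const int argc = static_cast<int>(sizeof(args) / sizeof(args[0]));

    const bool ok = armnn::test::ParseCommandLine(argc, const_cast<char**>(args), provider, options);

    // On success: options.m_IterationCount == 1000, options.m_InferenceTimesFile == "times.txt",
    // and options.m_EnableProfiling == true.
    return ok ? 0 : 1;
}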

◆ ValidateDirectory()

bool ValidateDirectory(std::string& dir)

Definition at line 91 of file InferenceTest.cpp.

Referenced by ClassifierInferenceTestMain(), main(), ClassifierTestCaseProvider< TDatabase, InferenceModel >::ProcessCommandLineOptions(), and YoloTestCaseProvider< Model >::ProcessCommandLineOptions().

{
    if (dir.empty())
    {
        std::cerr << "No directory specified" << std::endl;
        return false;
    }

    if (dir[dir.length() - 1] != '/')
    {
        dir += "/";
    }

    if (!fs::exists(dir))
    {
        std::cerr << "Given directory " << dir << " does not exist" << std::endl;
        return false;
    }

    if (!fs::is_directory(dir))
    {
        std::cerr << "Given directory [" << dir << "] is not a directory" << std::endl;
        return false;
    }

    return true;
}
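
ValidateDirectory() both validates and normalises its argument: on success the string is guaranteed to end in '/', so callers such as ClassifierInferenceTestMain() can append a file name directly (modelOptions.m_ModelDir + modelFilename). A minimal usage sketch, assuming the example path "/tmp" exists on the machine:

// Sketch only: shows the in/out behaviour of ValidateDirectory().
#include <iostream>
#include <string>

int main()
{
    std::string modelDir = "/tmp";                // hypothetical model directory
    if (armnn::test::ValidateDirectory(modelDir))
    {
        std::cout << "Using model directory: " << modelDir << std::endl;   // prints "/tmp/"
    }
    return 0;
}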