ArmNN 21.02
armnn::test Namespace Reference

Classes

class  ClassifierTestCase
 
class  ClassifierTestCaseProvider
 
class  IInferenceTestCase
 
class  IInferenceTestCaseProvider
 
class  InferenceModelTestCase
 
struct  InferenceTestOptions
 
class  TestFrameworkException
 

Typedefs

using TContainer = mapbox::util::variant< std::vector< float >, std::vector< int >, std::vector< unsigned char > >
 

Enumerations

enum  TestCaseResult { Ok, Failed, Abort }
 

Functions

bool ParseCommandLine (int argc, char **argv, IInferenceTestCaseProvider &testCaseProvider, InferenceTestOptions &outParams)
 Parse the command line of an ArmNN (or referencetests) inference test program. More...
 
bool ValidateDirectory (std::string &dir)
 
bool InferenceTest (const InferenceTestOptions &params, const std::vector< unsigned int > &defaultTestCaseIds, IInferenceTestCaseProvider &testCaseProvider)
 
template<typename TConstructTestCaseProvider >
int InferenceTestMain (int argc, char *argv[], const std::vector< unsigned int > &defaultTestCaseIds, TConstructTestCaseProvider constructTestCaseProvider)
 
template<typename TDatabase , typename TParser , typename TConstructDatabaseCallable >
int ClassifierInferenceTestMain (int argc, char *argv[], const char *modelFilename, bool isModelBinary, const char *inputBindingName, const char *outputBindingName, const std::vector< unsigned int > &defaultTestCaseIds, TConstructDatabaseCallable constructDatabase, const armnn::TensorShape *inputTensorShape=nullptr)
 

Typedef Documentation

◆ TContainer

using TContainer = mapbox::util::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char> >

Definition at line 29 of file InferenceTest.inl.

Enumeration Type Documentation

◆ TestCaseResult

enum class TestCaseResult
Enumerator
Ok 

The test completed without any errors.

Failed 

The test failed (e.g. the prediction didn't match the validation file). This will eventually fail the whole program but the remaining test cases will still be run.

Abort 

The test failed with a fatal error. The remaining tests will not be run.

Definition at line 71 of file InferenceTest.hpp.

72 {
73  /// The test completed without any errors.
74  Ok,
75  /// The test failed (e.g. the prediction didn't match the validation file).
76  /// This will eventually fail the whole program but the remaining test cases will still be run.
77  Failed,
78  /// The test failed with a fatal error. The remaining tests will not be run.
79  Abort
80 };
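The three values encode how a failure affects the rest of the run: Failed marks the whole program as failing but lets the remaining cases execute, while Abort stops the run immediately. A purely illustrative helper (not part of the ArmNN API, and assuming InferenceTest.hpp is on the include path) showing how a test case might map an outcome onto the enum:

#include "InferenceTest.hpp"   // assumed include path for the test framework header

// Hypothetical helper: Abort for a fatal error so no further cases run,
// Failed for a wrong prediction so the remaining cases are still exercised.
armnn::test::TestCaseResult ClassifyOutcome(bool ranOk, bool predictionMatches)
{
    if (!ranOk)
    {
        return armnn::test::TestCaseResult::Abort;
    }
    return predictionMatches ? armnn::test::TestCaseResult::Ok
                             : armnn::test::TestCaseResult::Failed;
}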

Function Documentation

◆ ClassifierInferenceTestMain()

int ClassifierInferenceTestMain (int argc,
                                 char* argv[],
                                 const char* modelFilename,
                                 bool isModelBinary,
                                 const char* inputBindingName,
                                 const char* outputBindingName,
                                 const std::vector< unsigned int >& defaultTestCaseIds,
                                 TConstructDatabaseCallable constructDatabase,
                                 const armnn::TensorShape* inputTensorShape = nullptr)

Definition at line 353 of file InferenceTest.inl.

References ARMNN_ASSERT, InferenceTestMain(), Params::m_ModelPath, and ValidateDirectory().

Referenced by main().

363 {
364  ARMNN_ASSERT(modelFilename);
365  ARMNN_ASSERT(inputBindingName);
366  ARMNN_ASSERT(outputBindingName);
367 
368  return InferenceTestMain(argc, argv, defaultTestCaseIds,
369  [=]
370  ()
371  {
372  using InferenceModel = InferenceModel<TParser, typename TDatabase::DataType>;
373  using TestCaseProvider = ClassifierTestCaseProvider<TDatabase, InferenceModel>;
374 
375  return make_unique<TestCaseProvider>(constructDatabase,
376  [&]
377  (const InferenceTestOptions &commonOptions,
378  typename InferenceModel::CommandLineOptions modelOptions)
379  {
380  if (!ValidateDirectory(modelOptions.m_ModelDir))
381  {
382  return std::unique_ptr<InferenceModel>();
383  }
384 
385  typename InferenceModel::Params modelParams;
386  modelParams.m_ModelPath = modelOptions.m_ModelDir + modelFilename;
387  modelParams.m_InputBindings = { inputBindingName };
388  modelParams.m_OutputBindings = { outputBindingName };
389 
390  if (inputTensorShape)
391  {
392  modelParams.m_InputShapes.push_back(*inputTensorShape);
393  }
394 
395  modelParams.m_IsModelBinary = isModelBinary;
396  modelParams.m_ComputeDevices = modelOptions.GetComputeDevicesAsBackendIds();
397  modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
398  modelParams.m_EnableFp16TurboMode = modelOptions.m_EnableFp16TurboMode;
399 
400  return std::make_unique<InferenceModel>(modelParams,
401  commonOptions.m_EnableProfiling,
402  commonOptions.m_DynamicBackendsPath);
403  });
404  });
405 }
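A classifier test program typically defines its main() as a thin wrapper around this function. The sketch below is illustrative only: MyDatabase, the TfLite parser choice, the model file, the binding names and the test case ids are placeholders, and the exact callable signature expected for constructDatabase should be checked against ClassifierTestCaseProvider.

#include "InferenceTest.hpp"                        // assumed include paths
#include "armnnTfLiteParser/ITfLiteParser.hpp"

int main(int argc, char* argv[])
{
    // MyDatabase is a hypothetical dataset type supplying test images and labels.
    return armnn::test::ClassifierInferenceTestMain<MyDatabase, armnnTfLiteParser::ITfLiteParser>(
        argc, argv,
        "my_model.tflite",                          // modelFilename, resolved against --model-dir
        true,                                       // isModelBinary
        "input",                                    // inputBindingName
        "output",                                   // outputBindingName
        { 0, 1, 2 },                                // defaultTestCaseIds
        [](const char* dataDir, const auto& /*model*/)
        {
            return MyDatabase(dataDir);             // constructDatabase callable
        });
}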

◆ InferenceTest()

bool InferenceTest (const InferenceTestOptions& params,
                    const std::vector< unsigned int >& defaultTestCaseIds,
                    IInferenceTestCaseProvider& testCaseProvider)

Definition at line 115 of file InferenceTest.cpp.

References ARMNN_ASSERT_MSG, ARMNN_LOG, armnn::error, IInferenceTestCaseProvider::GetTestCase(), armnn::info, InferenceTestOptions::m_EnableProfiling, InferenceTestOptions::m_InferenceTimesFile, InferenceTestOptions::m_IterationCount, IInferenceTestCaseProvider::OnInferenceTestFinished(), armnn::warning, and Exception::what().

Referenced by InferenceTestMain().

118 {
119 #if !defined (NDEBUG)
120  if (params.m_IterationCount > 0) // If just running a few select images then don't bother to warn.
121  {
122  ARMNN_LOG(warning) << "Performance test running in DEBUG build - results may be inaccurate.";
123  }
124 #endif
125 
126  double totalTime = 0;
127  unsigned int nbProcessed = 0;
128  bool success = true;
129 
130  // Opens the file to write inference times to, if needed.
131  ofstream inferenceTimesFile;
132  const bool recordInferenceTimes = !params.m_InferenceTimesFile.empty();
133  if (recordInferenceTimes)
134  {
135  inferenceTimesFile.open(params.m_InferenceTimesFile.c_str(), ios_base::trunc | ios_base::out);
136  if (!inferenceTimesFile.good())
137  {
138  ARMNN_LOG(error) << "Failed to open inference times file for writing: "
139  << params.m_InferenceTimesFile;
140  return false;
141  }
142  }
143 
144  // Create a profiler and register it for the current thread.
145  std::unique_ptr<IProfiler> profiler = std::make_unique<IProfiler>();
146  ProfilerManager::GetInstance().RegisterProfiler(profiler.get());
147 
148  // Enable profiling if requested.
149  profiler->EnableProfiling(params.m_EnableProfiling);
150 
151  // Run a single test case to 'warm-up' the model. The first one can sometimes take up to 10x longer
152  std::unique_ptr<IInferenceTestCase> warmupTestCase = testCaseProvider.GetTestCase(0);
153  if (warmupTestCase == nullptr)
154  {
155  ARMNN_LOG(error) << "Failed to load test case";
156  return false;
157  }
158 
159  try
160  {
161  warmupTestCase->Run();
162  }
163  catch (const TestFrameworkException& testError)
164  {
165  ARMNN_LOG(error) << testError.what();
166  return false;
167  }
168 
169  const unsigned int nbTotalToProcess = params.m_IterationCount > 0 ? params.m_IterationCount
170  : static_cast<unsigned int>(defaultTestCaseIds.size());
171 
172  for (; nbProcessed < nbTotalToProcess; nbProcessed++)
173  {
174  const unsigned int testCaseId = params.m_IterationCount > 0 ? nbProcessed : defaultTestCaseIds[nbProcessed];
175  std::unique_ptr<IInferenceTestCase> testCase = testCaseProvider.GetTestCase(testCaseId);
176 
177  if (testCase == nullptr)
178  {
179  ARMNN_LOG(error) << "Failed to load test case";
180  return false;
181  }
182 
183  time_point<high_resolution_clock> predictStart;
184  time_point<high_resolution_clock> predictEnd;
185 
186  TestCaseResult result = TestCaseResult::Ok;
187 
188  try
189  {
190  predictStart = high_resolution_clock::now();
191 
192  testCase->Run();
193 
194  predictEnd = high_resolution_clock::now();
195 
196  // duration<double> will convert the time difference into seconds as a double by default.
197  double timeTakenS = duration<double>(predictEnd - predictStart).count();
198  totalTime += timeTakenS;
199 
200  // Outputs inference times, if needed.
201  if (recordInferenceTimes)
202  {
203  inferenceTimesFile << testCaseId << " " << (timeTakenS * 1000.0) << std::endl;
204  }
205 
206  result = testCase->ProcessResult(params);
207 
208  }
209  catch (const TestFrameworkException& testError)
210  {
211  ARMNN_LOG(error) << testError.what();
212  result = TestCaseResult::Abort;
213  }
214 
215  switch (result)
216  {
217  case TestCaseResult::Ok:
218  break;
219  case TestCaseResult::Abort:
220  return false;
221  case TestCaseResult::Failed:
222  // This test failed so we will fail the entire program eventually, but keep going for now.
223  success = false;
224  break;
225  default:
226  ARMNN_ASSERT_MSG(false, "Unexpected TestCaseResult");
227  return false;
228  }
229  }
230 
231  const double averageTimePerTestCaseMs = totalTime / nbProcessed * 1000.0f;
232 
233  ARMNN_LOG(info) << std::fixed << std::setprecision(3) <<
234  "Total time for " << nbProcessed << " test cases: " << totalTime << " seconds";
235  ARMNN_LOG(info) << std::fixed << std::setprecision(3) <<
236  "Average time per test case: " << averageTimePerTestCaseMs << " ms";
237 
238  // if profiling is enabled print out the results
239  if (profiler && profiler->IsProfilingEnabled())
240  {
241  profiler->Print(std::cout);
242  }
243 
244  if (!success)
245  {
246  ARMNN_LOG(error) << "One or more test cases failed";
247  return false;
248  }
249 
250  return testCaseProvider.OnInferenceTestFinished();
251 }
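InferenceTest() is normally reached through InferenceTestMain(), but it can also be driven directly. The sketch below assumes that IInferenceTestCase exposes Run() and ProcessResult() as the loop above calls them, and that only GetTestCase() is pure virtual on the provider while the other virtuals keep usable defaults; check InferenceTest.hpp for the exact interfaces.

#include "InferenceTest.hpp"   // assumed include path for the test framework header
#include <memory>

// A do-nothing test case: Run() would perform one inference and ProcessResult()
// would compare the prediction against the expected result.
class NoopTestCase : public armnn::test::IInferenceTestCase
{
public:
    void Run() override {}
    armnn::test::TestCaseResult ProcessResult(const armnn::test::InferenceTestOptions&) override
    {
        return armnn::test::TestCaseResult::Ok;
    }
};

// Provider that hands out one NoopTestCase per requested id.
class NoopProvider : public armnn::test::IInferenceTestCaseProvider
{
public:
    std::unique_ptr<armnn::test::IInferenceTestCase> GetTestCase(unsigned int) override
    {
        return std::make_unique<NoopTestCase>();
    }
};

int main()
{
    armnn::test::InferenceTestOptions options;   // defaults: no iteration override, no profiling
    NoopProvider provider;
    const bool ok = armnn::test::InferenceTest(options, { 0, 1, 2 }, provider);
    return ok ? 0 : 1;
}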

◆ InferenceTestMain()

int InferenceTestMain (int argc,
                       char* argv[],
                       const std::vector< unsigned int >& defaultTestCaseIds,
                       TConstructTestCaseProvider constructTestCaseProvider)

Definition at line 302 of file InferenceTest.inl.

References ARMNN_LOG, armnn::ConfigureLogging(), armnn::Debug, armnn::fatal, InferenceTest(), armnn::Info, ParseCommandLine(), and Exception::what().

Referenced by ClassifierInferenceTestMain(), and main().

306 {
307  // Configures logging for both the ARMNN library and this test program.
308 #ifdef NDEBUG
309  armnn::LogSeverity level = armnn::LogSeverity::Info;
310 #else
311  armnn::LogSeverity level = armnn::LogSeverity::Debug;
312 #endif
313  armnn::ConfigureLogging(true, true, level);
314 
315  try
316  {
317  std::unique_ptr<IInferenceTestCaseProvider> testCaseProvider = constructTestCaseProvider();
318  if (!testCaseProvider)
319  {
320  return 1;
321  }
322 
323  InferenceTestOptions inferenceTestOptions;
324  if (!ParseCommandLine(argc, argv, *testCaseProvider, inferenceTestOptions))
325  {
326  return 1;
327  }
328 
329  const bool success = InferenceTest(inferenceTestOptions, defaultTestCaseIds, *testCaseProvider);
330  return success ? 0 : 1;
331  }
332  catch (armnn::Exception const& e)
333  {
334  ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
335  return 1;
336  }
337 }
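A test executable usually calls this from main(), supplying a callable that builds its provider. A short sketch, reusing the hypothetical NoopProvider from the InferenceTest() example above:

#include "InferenceTest.hpp"   // assumed include path
#include <memory>

int main(int argc, char* argv[])
{
    // Logging, command-line parsing and the test loop are handled inside
    // InferenceTestMain; only the provider factory is supplied here.
    return armnn::test::InferenceTestMain(argc, argv, { 0, 1, 2 },
        [] { return std::make_unique<NoopProvider>(); });
}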

◆ ParseCommandLine()

bool ParseCommandLine (int argc,
                       char** argv,
                       IInferenceTestCaseProvider& testCaseProvider,
                       InferenceTestOptions& outParams)

Parse the command line of an ArmNN (or referencetests) inference test program.

Returns
false if any error occurred during options processing, otherwise true

Definition at line 28 of file InferenceTest.cpp.

References IInferenceTestCaseProvider::AddCommandLineOptions(), ARMNN_ASSERT_MSG, CheckRequiredOptions(), InferenceTestOptions::m_EnableProfiling, InferenceTestOptions::m_InferenceTimesFile, InferenceTestOptions::m_IterationCount, and IInferenceTestCaseProvider::ProcessCommandLineOptions().

Referenced by InferenceTestMain().

30 {
31  cxxopts::Options options("InferenceTest", "Inference iteration parameters");
32 
33  try
34  {
35  // Adds generic options needed for all inference tests.
36  options
37  .allow_unrecognised_options()
38  .add_options()
39  ("h,help", "Display help messages")
40  ("i,iterations", "Sets the number of inferences to perform. If unset, will only be run once.",
41  cxxopts::value<unsigned int>(outParams.m_IterationCount)->default_value("0"))
42  ("inference-times-file",
43  "If non-empty, each individual inference time will be recorded and output to this file",
44  cxxopts::value<std::string>(outParams.m_InferenceTimesFile)->default_value(""))
45  ("e,event-based-profiling", "Enables built in profiler. If unset, defaults to off.",
46  cxxopts::value<bool>(outParams.m_EnableProfiling)->default_value("0"));
47 
48  std::vector<std::string> required; //to be passed as reference to derived inference tests
49 
50  // Adds options specific to the ITestCaseProvider.
51  testCaseProvider.AddCommandLineOptions(options, required);
52 
53  auto result = options.parse(argc, argv);
54 
55  if (result.count("help"))
56  {
57  std::cout << options.help() << std::endl;
58  return false;
59  }
60 
61  CheckRequiredOptions(result, required);
62 
63  }
64  catch (const cxxopts::OptionException& e)
65  {
66  std::cerr << e.what() << std::endl << options.help() << std::endl;
67  return false;
68  }
69  catch (const std::exception& e)
70  {
71  // Coverity points out that default_value(...) can throw a bad_lexical_cast,
72  // and that desc.add_options() can throw boost::io::too_few_args.
73  // They really won't in any of these cases.
74  ARMNN_ASSERT_MSG(false, "Caught unexpected exception");
75  std::cerr << "Fatal internal error: " << e.what() << std::endl;
76  return false;
77  }
78 
79  if (!testCaseProvider.ProcessCommandLineOptions(outParams))
80  {
81  return false;
82  }
83 
84  return true;
85 }
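The common options added here can also be parsed without going through InferenceTestMain(). A sketch, again reusing the hypothetical NoopProvider from above; a typical invocation would then look something like ./MyTest --iterations 1000 -e --inference-times-file times.txt:

#include "InferenceTest.hpp"   // assumed include path

int main(int argc, char* argv[])
{
    NoopProvider provider;
    armnn::test::InferenceTestOptions options;

    // Returns false on --help, on an invalid option, or if the provider rejects its options.
    if (!armnn::test::ParseCommandLine(argc, argv, provider, options))
    {
        return 1;
    }

    // options.m_IterationCount, options.m_EnableProfiling and
    // options.m_InferenceTimesFile are now populated from the command line.
    return 0;
}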

◆ ValidateDirectory()

bool ValidateDirectory (std::string& dir)

Definition at line 87 of file InferenceTest.cpp.

Referenced by ClassifierInferenceTestMain(), main(), ClassifierTestCaseProvider< TDatabase, InferenceModel >::ProcessCommandLineOptions(), and YoloTestCaseProvider< Model >::ProcessCommandLineOptions().

88 {
89  if (dir.empty())
90  {
91  std::cerr << "No directory specified" << std::endl;
92  return false;
93  }
94 
95  if (dir[dir.length() - 1] != '/')
96  {
97  dir += "/";
98  }
99 
100  if (!fs::exists(dir))
101  {
102  std::cerr << "Given directory " << dir << " does not exist" << std::endl;
103  return false;
104  }
105 
106  if (!fs::is_directory(dir))
107  {
108  std::cerr << "Given directory [" << dir << "] is not a directory" << std::endl;
109  return false;
110  }
111 
112  return true;
113 }
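Note that the argument is taken by non-const reference: the function normalises the path in place by appending a trailing '/' before checking that it exists and is a directory. A small sketch with an illustrative path:

#include "InferenceTest.hpp"   // assumed include path
#include <iostream>
#include <string>

int main()
{
    std::string modelDir = "/tmp/models";            // hypothetical directory
    if (armnn::test::ValidateDirectory(modelDir))    // modelDir becomes "/tmp/models/"
    {
        std::cout << "Using model directory: " << modelDir << std::endl;
        return 0;
    }
    return 1;
}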