ArmNN 21.11
InferenceModel< IParser, TDataType >::CommandLineOptions Struct Reference

#include <InferenceModel.hpp>

Public Member Functions

std::vector< armnn::BackendId > GetComputeDevicesAsBackendIds ()
 

Public Attributes

std::string m_ModelDir
 
std::vector< std::string > m_ComputeDevices
 
std::string m_DynamicBackendsPath
 
bool m_VisualizePostOptimizationModel
 
bool m_EnableFp16TurboMode
 
bool m_EnableBf16TurboMode
 
std::string m_Labels
 

Detailed Description

template<typename IParser, typename TDataType>
struct InferenceModel< IParser, TDataType >::CommandLineOptions

Definition at line 380 of file InferenceModel.hpp.

Member Function Documentation

◆ GetComputeDevicesAsBackendIds()

inline std::vector<armnn::BackendId> GetComputeDevicesAsBackendIds ()

Definition at line 390 of file InferenceModel.hpp.

{
    std::vector<armnn::BackendId> backendIds;
    std::copy(m_ComputeDevices.begin(), m_ComputeDevices.end(), std::back_inserter(backendIds));
    return backendIds;
}
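The conversion works because armnn::BackendId is constructible from std::string, so each device name copies straight into a BackendId. A minimal usage sketch, not taken from the ArmNN sources: MyParser is a placeholder parser type and the model directory and device names are hypothetical values.

// Populate the options as a command-line front end typically would,
// then convert the device-name strings into armnn::BackendId values.
InferenceModel<MyParser, float>::CommandLineOptions options;   // MyParser is a placeholder
options.m_ModelDir            = "/path/to/model/dir";          // hypothetical path
options.m_ComputeDevices      = { "CpuAcc", "CpuRef" };        // backend names as strings
options.m_EnableFp16TurboMode = false;

// Element-wise copy; relies on BackendId's std::string constructor.
std::vector<armnn::BackendId> backends = options.GetComputeDevicesAsBackendIds();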

Member Data Documentation

◆ m_ComputeDevices

std::vector<std::string> m_ComputeDevices

◆ m_DynamicBackendsPath

std::string m_DynamicBackendsPath

◆ m_EnableBf16TurboMode

bool m_EnableBf16TurboMode

◆ m_EnableFp16TurboMode

bool m_EnableFp16TurboMode

◆ m_Labels

std::string m_Labels

◆ m_ModelDir

std::string m_ModelDir

◆ m_VisualizePostOptimizationModel

bool m_VisualizePostOptimizationModel

The documentation for this struct was generated from the following file:

InferenceModel.hpp