ArmNN
 21.02
Params Struct Reference

#include <InferenceModel.hpp>

Public Member Functions

 Params ()
 

Public Attributes

std::string m_ModelPath
 
std::vector< std::string > m_InputBindings
 
std::vector< armnn::TensorShape > m_InputShapes
 
std::vector< std::string > m_OutputBindings
 
std::vector< armnn::BackendId > m_ComputeDevices
 
std::string m_DynamicBackendsPath
 
size_t m_SubgraphId
 
bool m_IsModelBinary
 
bool m_VisualizePostOptimizationModel
 
bool m_EnableFp16TurboMode
 
bool m_EnableBf16TurboMode
 
bool m_PrintIntermediateLayers
 
bool m_ParseUnsupported
 
bool m_InferOutputShape
 
bool m_EnableFastMath
 
bool m_SaveCachedNetwork
 
std::string m_CachedNetworkFilePath
 
unsigned int m_NumberOfThreads
 
std::string m_MLGOTuningFilePath
 

Detailed Description

Definition at line 83 of file InferenceModel.hpp.

Constructor & Destructor Documentation

◆ Params()

Params ( )
inline

Definition at line 106 of file InferenceModel.hpp.

References Params::m_CachedNetworkFilePath, Params::m_EnableBf16TurboMode, Params::m_EnableFastMath, Params::m_EnableFp16TurboMode, Params::m_InferOutputShape, Params::m_IsModelBinary, Params::m_MLGOTuningFilePath, Params::m_NumberOfThreads, Params::m_ParseUnsupported, Params::m_PrintIntermediateLayers, Params::m_SaveCachedNetwork, Params::m_SubgraphId, and Params::m_VisualizePostOptimizationModel.

107      : m_ComputeDevices{}
108      , m_SubgraphId(0)
109      , m_IsModelBinary(true)
110      , m_VisualizePostOptimizationModel(false)
111      , m_EnableFp16TurboMode(false)
112      , m_EnableBf16TurboMode(false)
113      , m_PrintIntermediateLayers(false)
114      , m_ParseUnsupported(false)
115      , m_InferOutputShape(false)
116      , m_EnableFastMath(false)
117      , m_SaveCachedNetwork(false)
118      , m_CachedNetworkFilePath("")
119      , m_NumberOfThreads(0)
120      , m_MLGOTuningFilePath("")
121  {}
std::vector< armnn::BackendId > m_ComputeDevices

Member Data Documentation

◆ m_CachedNetworkFilePath

std::string m_CachedNetworkFilePath

◆ m_ComputeDevices

std::vector<armnn::BackendId> m_ComputeDevices

◆ m_DynamicBackendsPath

◆ m_EnableBf16TurboMode

bool m_EnableBf16TurboMode

◆ m_EnableFastMath

bool m_EnableFastMath

◆ m_EnableFp16TurboMode

bool m_EnableFp16TurboMode

◆ m_InferOutputShape

bool m_InferOutputShape

◆ m_InputBindings

◆ m_InputShapes

std::vector<armnn::TensorShape> m_InputShapes

Definition at line 87 of file InferenceModel.hpp.

Referenced by CreateNetworkImpl< IParser >::Create(), and MainImpl().

◆ m_IsModelBinary

bool m_IsModelBinary

◆ m_MLGOTuningFilePath

std::string m_MLGOTuningFilePath

◆ m_ModelPath

◆ m_NumberOfThreads

unsigned int m_NumberOfThreads

◆ m_OutputBindings

◆ m_ParseUnsupported

bool m_ParseUnsupported

◆ m_PrintIntermediateLayers

bool m_PrintIntermediateLayers

◆ m_SaveCachedNetwork

bool m_SaveCachedNetwork

◆ m_SubgraphId

size_t m_SubgraphId

◆ m_VisualizePostOptimizationModel

bool m_VisualizePostOptimizationModel

The documentation for this struct was generated from the following file: