Arm NN 22.08 — Params struct member documentation (generated reference)
#include <InferenceModel.hpp>
Public Member Functions
Params()
Public Attributes
std::string | m_ModelPath |
std::vector< std::string > | m_InputBindings |
std::vector< armnn::TensorShape > | m_InputShapes |
std::vector< std::string > | m_OutputBindings |
std::vector< armnn::BackendId > | m_ComputeDevices |
std::string | m_DynamicBackendsPath |
size_t | m_SubgraphId |
bool | m_AllowExpandedDims |
bool | m_IsModelBinary |
bool | m_VisualizePostOptimizationModel |
bool | m_EnableFp16TurboMode |
bool | m_EnableBf16TurboMode |
bool | m_PrintIntermediateLayers |
bool | m_ParseUnsupported |
bool | m_InferOutputShape |
bool | m_EnableFastMath |
bool | m_SaveCachedNetwork |
bool | m_OutputDetailsToStdOut |
bool | m_OutputDetailsOnlyToStdOut |
std::string | m_CachedNetworkFilePath |
unsigned int | m_NumberOfThreads |
std::string | m_MLGOTuningFilePath |
bool | m_AsyncEnabled |
size_t | m_ThreadPoolSize |
bool | m_ImportInputsIfAligned |
Definition at line 56 of file InferenceModel.hpp.
Params() — inline constructor
Definition at line 85 of file InferenceModel.hpp.
References Params::m_AllowExpandedDims, Params::m_AsyncEnabled, Params::m_CachedNetworkFilePath, Params::m_EnableBf16TurboMode, Params::m_EnableFastMath, Params::m_EnableFp16TurboMode, Params::m_ImportInputsIfAligned, Params::m_InferOutputShape, Params::m_IsModelBinary, Params::m_MLGOTuningFilePath, Params::m_NumberOfThreads, Params::m_OutputDetailsOnlyToStdOut, Params::m_OutputDetailsToStdOut, Params::m_ParseUnsupported, Params::m_PrintIntermediateLayers, Params::m_SaveCachedNetwork, Params::m_SubgraphId, Params::m_ThreadPoolSize, and Params::m_VisualizePostOptimizationModel.
bool m_AllowExpandedDims |
Definition at line 65 of file InferenceModel.hpp.
Referenced by CreateNetworkImpl< IParser >::Create(), and Params::Params().
bool m_AsyncEnabled |
Definition at line 80 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
std::string m_CachedNetworkFilePath |
Definition at line 77 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
std::vector<armnn::BackendId> m_ComputeDevices |
Definition at line 62 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and main().
std::string m_DynamicBackendsPath |
Definition at line 63 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::CreateWorkingMemHandle(), and InferenceModel< IParser, TDataType >::InferenceModel().
bool m_EnableBf16TurboMode |
Definition at line 69 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
bool m_EnableFastMath |
Definition at line 73 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
bool m_EnableFp16TurboMode |
Definition at line 68 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
bool m_ImportInputsIfAligned |
Definition at line 82 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::CreateWorkingMemHandle(), Params::Params(), and InferenceModel< IParser, TDataType >::Run().
bool m_InferOutputShape |
Definition at line 72 of file InferenceModel.hpp.
Referenced by CreateNetworkImpl< IParser >::Create(), InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
std::vector<std::string> m_InputBindings |
Definition at line 59 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::CheckInputIndexIsValid(), CreateNetworkImpl< IParser >::Create(), InferenceModel< IParser, TDataType >::CreateWorkingMemHandle(), InferenceModel< IParser, TDataType >::GetInputBindingInfo(), InferenceModel< IParser, TDataType >::GetInputBindingInfos(), InferenceModel< IParser, TDataType >::GetInputQuantizationParams(), InferenceModel< IParser, TDataType >::GetInputSize(), InferenceModel< IParser, TDataType >::InferenceModel(), and main().
std::vector<armnn::TensorShape> m_InputShapes |
Definition at line 60 of file InferenceModel.hpp.
Referenced by CreateNetworkImpl< IParser >::Create().
bool m_IsModelBinary |
Definition at line 66 of file InferenceModel.hpp.
Referenced by CreateNetworkImpl< IParser >::Create(), main(), and Params::Params().
std::string m_MLGOTuningFilePath |
Definition at line 79 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
std::string m_ModelPath |
Definition at line 58 of file InferenceModel.hpp.
Referenced by armnn::test::ClassifierInferenceTestMain(), CreateNetworkImpl< IParser >::Create(), InferenceModel< IParser, TDataType >::InferenceModel(), and main().
unsigned int m_NumberOfThreads |
Definition at line 78 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
std::vector<std::string> m_OutputBindings |
Definition at line 61 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::CheckOutputIndexIsValid(), CreateNetworkImpl< IParser >::Create(), InferenceModel< IParser, TDataType >::CreateWorkingMemHandle(), InferenceModel< IParser, TDataType >::GetAllQuantizationParams(), InferenceModel< IParser, TDataType >::GetOutputBindingInfo(), InferenceModel< IParser, TDataType >::GetOutputBindingInfos(), InferenceModel< IParser, TDataType >::GetOutputSize(), InferenceModel< IParser, TDataType >::GetQuantizationParams(), InferenceModel< IParser, TDataType >::InferenceModel(), and main().
bool m_OutputDetailsOnlyToStdOut |
Definition at line 76 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
bool m_OutputDetailsToStdOut |
Definition at line 75 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
bool m_ParseUnsupported |
Definition at line 71 of file InferenceModel.hpp.
Referenced by CreateNetworkImpl< IParser >::Create(), and Params::Params().
bool m_PrintIntermediateLayers |
Definition at line 70 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
bool m_SaveCachedNetwork |
Definition at line 74 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
size_t m_SubgraphId |
Definition at line 64 of file InferenceModel.hpp.
Referenced by CreateNetworkImpl< IParser >::Create(), and Params::Params().
size_t m_ThreadPoolSize |
Definition at line 81 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().
bool m_VisualizePostOptimizationModel |
Definition at line 67 of file InferenceModel.hpp.
Referenced by InferenceModel< IParser, TDataType >::InferenceModel(), and Params::Params().