21.08
|
#include <InferenceModel.hpp>
Classes | |
struct | CommandLineOptions |
Public Types | |
using | DataType = TDataType |
using | Params = InferenceModelInternal::Params |
using | QuantizationParams = InferenceModelInternal::QuantizationParams |
using | TContainer = mapbox::util::variant< std::vector< float >, std::vector< int >, std::vector< unsigned char >, std::vector< int8_t > > |
Public Member Functions | |
InferenceModel (const Params &params, bool enableProfiling, const std::string &dynamicBackendsPath, const std::shared_ptr< armnn::IRuntime > &runtime=nullptr) | |
void | CheckInputIndexIsValid (unsigned int inputIndex) const |
void | CheckOutputIndexIsValid (unsigned int outputIndex) const |
unsigned int | GetInputSize (unsigned int inputIndex=0u) const |
unsigned int | GetOutputSize (unsigned int outputIndex=0u) const |
std::chrono::duration< double, std::milli > | Run (const std::vector< TContainer > &inputContainers, std::vector< TContainer > &outputContainers) |
std::tuple< unsigned int, std::chrono::duration< double, std::milli > > | RunAsync (armnn::experimental::IWorkingMemHandle &workingMemHandleRef, const std::vector< TContainer > &inputContainers, std::vector< TContainer > &outputContainers, unsigned int inferenceID) |
void | RunAsync (const std::vector< TContainer > &inputContainers, std::vector< TContainer > &outputContainers, std::shared_ptr< armnn::IAsyncExecutionCallback > cb) |
const armnn::BindingPointInfo & | GetInputBindingInfo (unsigned int inputIndex=0u) const |
const std::vector< armnn::BindingPointInfo > & | GetInputBindingInfos () const |
const armnn::BindingPointInfo & | GetOutputBindingInfo (unsigned int outputIndex=0u) const |
const std::vector< armnn::BindingPointInfo > & | GetOutputBindingInfos () const |
QuantizationParams | GetQuantizationParams (unsigned int outputIndex=0u) const |
QuantizationParams | GetInputQuantizationParams (unsigned int inputIndex=0u) const |
std::vector< QuantizationParams > | GetAllQuantizationParams () const |
std::unique_ptr< armnn::experimental::IWorkingMemHandle > | CreateWorkingMemHandle () |
Static Public Member Functions | |
static void | AddCommandLineOptions (cxxopts::Options &options, CommandLineOptions &cLineOptions, std::vector< std::string > &required) |
Definition at line 340 of file InferenceModel.hpp.
using DataType = TDataType |
Definition at line 343 of file InferenceModel.hpp.
using Params = InferenceModelInternal::Params |
Definition at line 344 of file InferenceModel.hpp.
Definition at line 345 of file InferenceModel.hpp.
using TContainer = mapbox::util::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>, std::vector<int8_t> > |
Definition at line 347 of file InferenceModel.hpp.
|
inline |
Definition at line 405 of file InferenceModel.hpp.
References ARMNN_LOG, ARMNN_SCOPED_HEAP_PROFILING, CreateNetworkImpl< IParser >::Create(), IRuntime::Create(), armnn::Failure, armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::InferAndValidate, Params::m_AsyncEnabled, Params::m_CachedNetworkFilePath, Params::m_ComputeDevices, OptimizerOptions::m_Debug, Params::m_DynamicBackendsPath, IRuntime::CreationOptions::m_DynamicBackendsPath, Params::m_EnableBf16TurboMode, Params::m_EnableFastMath, Params::m_EnableFp16TurboMode, IRuntime::CreationOptions::m_EnableGpuProfiling, Params::m_InferOutputShape, Params::m_InputBindings, Params::m_MLGOTuningFilePath, OptimizerOptions::m_ModelOptions, Params::m_ModelPath, Params::m_NumberOfThreads, Params::m_OutputBindings, Params::m_OutputDetailsToStdOut, Params::m_PrintIntermediateLayers, OptimizerOptions::m_ReduceFp32ToBf16, OptimizerOptions::m_ReduceFp32ToFp16, Params::m_SaveCachedNetwork, OptimizerOptions::m_shapeInferenceMethod, Params::m_ThreadPoolSize, Params::m_VisualizePostOptimizationModel, armnn::Optimize(), armnn::Undefined, and armnn::ValidateOnly.
|
inlinestatic |
Definition at line 367 of file InferenceModel.hpp.
References armnn::BackendRegistryInstance(), BackendRegistry::GetBackendIdsAsString(), InferenceModel< IParser, TDataType >::CommandLineOptions::m_ComputeDevices, InferenceModel< IParser, TDataType >::CommandLineOptions::m_DynamicBackendsPath, InferenceModel< IParser, TDataType >::CommandLineOptions::m_EnableBf16TurboMode, InferenceModel< IParser, TDataType >::CommandLineOptions::m_EnableFp16TurboMode, InferenceModel< IParser, TDataType >::CommandLineOptions::m_Labels, InferenceModel< IParser, TDataType >::CommandLineOptions::m_ModelDir, and InferenceModel< IParser, TDataType >::CommandLineOptions::m_VisualizePostOptimizationModel.
Referenced by ClassifierTestCaseProvider< TDatabase, InferenceModel >::AddCommandLineOptions().
|
inline |
Definition at line 522 of file InferenceModel.hpp.
References Params::m_InputBindings.
|
inline |
Definition at line 530 of file InferenceModel.hpp.
References Params::m_OutputBindings.
|
inline |
Definition at line 734 of file InferenceModel.hpp.
References Params::m_DynamicBackendsPath, Params::m_InputBindings, Params::m_OutputBindings, armnnUtils::MakeInputTensors(), MakeInputTensors(), armnnUtils::MakeOutputTensors(), and MakeOutputTensors().
Referenced by MainImpl().
|
inline |
Definition at line 724 of file InferenceModel.hpp.
References Params::m_OutputBindings.
|
inline |
Definition at line 688 of file InferenceModel.hpp.
References Params::m_InputBindings.
Referenced by main().
|
inline |
Definition at line 694 of file InferenceModel.hpp.
References Params::m_InputBindings.
|
inline |
Definition at line 717 of file InferenceModel.hpp.
References Params::m_InputBindings.
Referenced by MainImpl().
|
inline |
Definition at line 538 of file InferenceModel.hpp.
References Params::m_InputBindings.
Referenced by MainImpl().
|
inline |
Definition at line 699 of file InferenceModel.hpp.
References Params::m_OutputBindings.
|
inline |
Definition at line 705 of file InferenceModel.hpp.
References Params::m_OutputBindings.
Referenced by MainImpl().
|
inline |
Definition at line 544 of file InferenceModel.hpp.
References Params::m_OutputBindings.
Referenced by main(), and MainImpl().
|
inline |
Definition at line 710 of file InferenceModel.hpp.
References Params::m_OutputBindings.
|
inline |
Definition at line 550 of file InferenceModel.hpp.
References armnn::Failure, armnn::GetTimeDuration(), armnn::GetTimeNow(), MakeInputTensors(), MakeOutputTensors(), and armnn::numeric_cast().
Referenced by MainImpl().
|
inline |
Definition at line 598 of file InferenceModel.hpp.
References armnn::Failure, armnn::GetTimeDuration(), armnn::GetTimeNow(), MakeInputTensors(), MakeOutputTensors(), and armnn::numeric_cast().
Referenced by MainImpl().
|
inline |
Definition at line 651 of file InferenceModel.hpp.
References MakeInputTensors(), MakeOutputTensors(), armnn::Medium, and armnn::numeric_cast().