21.11
|
#include <InferenceModel.hpp>
Classes | |
struct | CommandLineOptions |
Public Types | |
using | DataType = TDataType |
using | Params = InferenceModelInternal::Params |
using | QuantizationParams = InferenceModelInternal::QuantizationParams |
Public Member Functions | |
InferenceModel (const Params &params, bool enableProfiling, const std::string &dynamicBackendsPath, const std::shared_ptr< armnn::IRuntime > &runtime=nullptr) | |
void | CheckInputIndexIsValid (unsigned int inputIndex) const |
void | CheckOutputIndexIsValid (unsigned int outputIndex) const |
unsigned int | GetInputSize (unsigned int inputIndex=0u) const |
unsigned int | GetOutputSize (unsigned int outputIndex=0u) const |
std::chrono::duration< double, std::milli > | Run (const std::vector< armnnUtils::TContainer > &inputContainers, std::vector< armnnUtils::TContainer > &outputContainers) |
std::tuple< unsigned int, std::chrono::duration< double, std::milli > > | RunAsync (armnn::experimental::IWorkingMemHandle &workingMemHandleRef, const std::vector< armnnUtils::TContainer > &inputContainers, std::vector< armnnUtils::TContainer > &outputContainers, unsigned int inferenceID) |
void | RunAsync (const std::vector< armnnUtils::TContainer > &inputContainers, std::vector< armnnUtils::TContainer > &outputContainers, std::shared_ptr< armnn::IAsyncExecutionCallback > cb) |
const armnn::BindingPointInfo & | GetInputBindingInfo (unsigned int inputIndex=0u) const |
const std::vector< armnn::BindingPointInfo > & | GetInputBindingInfos () const |
const armnn::BindingPointInfo & | GetOutputBindingInfo (unsigned int outputIndex=0u) const |
const std::vector< armnn::BindingPointInfo > & | GetOutputBindingInfos () const |
QuantizationParams | GetQuantizationParams (unsigned int outputIndex=0u) const |
QuantizationParams | GetInputQuantizationParams (unsigned int inputIndex=0u) const |
std::vector< QuantizationParams > | GetAllQuantizationParams () const |
std::unique_ptr< armnn::experimental::IWorkingMemHandle > | CreateWorkingMemHandle () |
Static Public Member Functions | |
static void | AddCommandLineOptions (cxxopts::Options &options, CommandLineOptions &cLineOptions, std::vector< std::string > &required) |
Definition at line 372 of file InferenceModel.hpp.
using DataType = TDataType |
Definition at line 375 of file InferenceModel.hpp.
using Params = InferenceModelInternal::Params |
Definition at line 376 of file InferenceModel.hpp.
Definition at line 377 of file InferenceModel.hpp.
|
inline |
Definition at line 436 of file InferenceModel.hpp.
References ARMNN_LOG, ARMNN_SCOPED_HEAP_PROFILING, CreateNetworkImpl< IParser >::Create(), IRuntime::Create(), armnn::DetailsOnly, armnn::DetailsWithEvents, armnn::Failure, armnn::GetTimeDuration(), armnn::GetTimeNow(), armnn::InferAndValidate, Params::m_AsyncEnabled, Params::m_CachedNetworkFilePath, Params::m_ComputeDevices, OptimizerOptions::m_Debug, Params::m_DynamicBackendsPath, IRuntime::CreationOptions::m_DynamicBackendsPath, Params::m_EnableBf16TurboMode, Params::m_EnableFastMath, Params::m_EnableFp16TurboMode, IRuntime::CreationOptions::m_EnableGpuProfiling, Params::m_InferOutputShape, Params::m_InputBindings, Params::m_MLGOTuningFilePath, OptimizerOptions::m_ModelOptions, Params::m_ModelPath, Params::m_NumberOfThreads, Params::m_OutputBindings, Params::m_OutputDetailsOnlyToStdOut, Params::m_OutputDetailsToStdOut, Params::m_PrintIntermediateLayers, OptimizerOptions::m_ProfilingEnabled, OptimizerOptions::m_ReduceFp32ToBf16, OptimizerOptions::m_ReduceFp32ToFp16, Params::m_SaveCachedNetwork, OptimizerOptions::m_shapeInferenceMethod, Params::m_ThreadPoolSize, Params::m_VisualizePostOptimizationModel, armnn::Optimize(), armnn::Undefined, and armnn::ValidateOnly.
|
inlinestatic |
Definition at line 398 of file InferenceModel.hpp.
References armnn::BackendRegistryInstance(), BackendRegistry::GetBackendIdsAsString(), InferenceModel< IParser, TDataType >::CommandLineOptions::m_ComputeDevices, InferenceModel< IParser, TDataType >::CommandLineOptions::m_DynamicBackendsPath, InferenceModel< IParser, TDataType >::CommandLineOptions::m_EnableBf16TurboMode, InferenceModel< IParser, TDataType >::CommandLineOptions::m_EnableFp16TurboMode, InferenceModel< IParser, TDataType >::CommandLineOptions::m_Labels, InferenceModel< IParser, TDataType >::CommandLineOptions::m_ModelDir, and InferenceModel< IParser, TDataType >::CommandLineOptions::m_VisualizePostOptimizationModel.
Referenced by ClassifierTestCaseProvider< TDatabase, InferenceModel >::AddCommandLineOptions().
|
inline |
Definition at line 560 of file InferenceModel.hpp.
References Params::m_InputBindings.
|
inline |
Definition at line 568 of file InferenceModel.hpp.
References Params::m_OutputBindings.
|
inline |
Definition at line 772 of file InferenceModel.hpp.
References Params::m_DynamicBackendsPath, Params::m_InputBindings, Params::m_OutputBindings, armnnUtils::MakeInputTensors(), MakeInputTensors(), armnnUtils::MakeOutputTensors(), and MakeOutputTensors().
Referenced by MainImpl().
|
inline |
Definition at line 762 of file InferenceModel.hpp.
References Params::m_OutputBindings.
|
inline |
Definition at line 726 of file InferenceModel.hpp.
References Params::m_InputBindings.
Referenced by main().
|
inline |
Definition at line 732 of file InferenceModel.hpp.
References Params::m_InputBindings.
|
inline |
Definition at line 755 of file InferenceModel.hpp.
References Params::m_InputBindings.
Referenced by MainImpl().
|
inline |
Definition at line 576 of file InferenceModel.hpp.
References Params::m_InputBindings.
Referenced by MainImpl().
|
inline |
Definition at line 737 of file InferenceModel.hpp.
References Params::m_OutputBindings.
Referenced by MainImpl().
|
inline |
Definition at line 743 of file InferenceModel.hpp.
References Params::m_OutputBindings.
Referenced by MainImpl().
|
inline |
Definition at line 582 of file InferenceModel.hpp.
References Params::m_OutputBindings.
Referenced by main(), and MainImpl().
|
inline |
Definition at line 748 of file InferenceModel.hpp.
References Params::m_OutputBindings.
|
inline |
Definition at line 588 of file InferenceModel.hpp.
References armnn::Failure, armnn::GetTimeDuration(), armnn::GetTimeNow(), MakeInputTensors(), MakeOutputTensors(), and armnn::numeric_cast().
Referenced by MainImpl().
|
inline |
Definition at line 636 of file InferenceModel.hpp.
References armnn::Failure, armnn::GetTimeDuration(), armnn::GetTimeNow(), MakeInputTensors(), MakeOutputTensors(), and armnn::numeric_cast().
Referenced by MainImpl().
|
inline |
Definition at line 689 of file InferenceModel.hpp.
References MakeInputTensors(), MakeOutputTensors(), armnn::Medium, and armnn::numeric_cast().