ArmNN 21.08
InferenceModel< IParser, TDataType > Member List

This is the complete list of members for InferenceModel< IParser, TDataType >, including all inherited members.

AddCommandLineOptions(cxxopts::Options &options, CommandLineOptions &cLineOptions, std::vector< std::string > &required) : InferenceModel< IParser, TDataType > [inline, static]
CheckInputIndexIsValid(unsigned int inputIndex) const : InferenceModel< IParser, TDataType > [inline]
CheckOutputIndexIsValid(unsigned int outputIndex) const : InferenceModel< IParser, TDataType > [inline]
CreateWorkingMemHandle() : InferenceModel< IParser, TDataType > [inline]
DataType typedef : InferenceModel< IParser, TDataType >
GetAllQuantizationParams() const : InferenceModel< IParser, TDataType > [inline]
GetInputBindingInfo(unsigned int inputIndex=0u) const : InferenceModel< IParser, TDataType > [inline]
GetInputBindingInfos() const : InferenceModel< IParser, TDataType > [inline]
GetInputQuantizationParams(unsigned int inputIndex=0u) const : InferenceModel< IParser, TDataType > [inline]
GetInputSize(unsigned int inputIndex=0u) const : InferenceModel< IParser, TDataType > [inline]
GetOutputBindingInfo(unsigned int outputIndex=0u) const : InferenceModel< IParser, TDataType > [inline]
GetOutputBindingInfos() const : InferenceModel< IParser, TDataType > [inline]
GetOutputSize(unsigned int outputIndex=0u) const : InferenceModel< IParser, TDataType > [inline]
GetQuantizationParams(unsigned int outputIndex=0u) const : InferenceModel< IParser, TDataType > [inline]
InferenceModel(const Params &params, bool enableProfiling, const std::string &dynamicBackendsPath, const std::shared_ptr< armnn::IRuntime > &runtime=nullptr) : InferenceModel< IParser, TDataType > [inline]
Params typedef : InferenceModel< IParser, TDataType >
QuantizationParams typedef : InferenceModel< IParser, TDataType >
Run(const std::vector< TContainer > &inputContainers, std::vector< TContainer > &outputContainers) : InferenceModel< IParser, TDataType > [inline]
RunAsync(armnn::experimental::IWorkingMemHandle &workingMemHandleRef, const std::vector< TContainer > &inputContainers, std::vector< TContainer > &outputContainers, unsigned int inferenceID) : InferenceModel< IParser, TDataType > [inline]
RunAsync(const std::vector< TContainer > &inputContainers, std::vector< TContainer > &outputContainers, std::shared_ptr< armnn::IAsyncExecutionCallback > cb) : InferenceModel< IParser, TDataType > [inline]
TContainer typedef : InferenceModel< IParser, TDataType >
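For orientation, below is a minimal sketch of the synchronous Run() path, assuming the TfLite parser as IParser. The model path, the tensor binding names, and the Params field names (m_ModelPath, m_InputBindings, m_OutputBindings, m_ComputeDevices) are assumptions based on the style of the ArmNN 21.08 test utilities, not guaranteed by this member list.

```cpp
// Minimal sketch, not a definitive recipe: InferenceModel.hpp lives in the
// ArmNN tests sources, and the Params field names below are assumed.
#include "InferenceModel.hpp"
#include <armnnTfLiteParser/ITfLiteParser.hpp>

#include <vector>

int main()
{
    using Model = InferenceModel<armnnTfLiteParser::ITfLiteParser, float>;

    Model::Params params;
    params.m_ModelPath      = "model.tflite";            // hypothetical path
    params.m_InputBindings  = { "input" };               // hypothetical tensor names
    params.m_OutputBindings = { "output" };
    params.m_ComputeDevices = { armnn::Compute::CpuRef };

    // Constructor per the list above: Params, enableProfiling,
    // dynamicBackendsPath, optional shared runtime (defaults to nullptr).
    Model model(params, /*enableProfiling=*/false, /*dynamicBackendsPath=*/"");

    // Size the I/O buffers from the parsed network, then run synchronously.
    std::vector<float> inputData(model.GetInputSize(), 0.0f);
    std::vector<float> outputData(model.GetOutputSize());

    std::vector<Model::TContainer> inputs  = { inputData };
    std::vector<Model::TContainer> outputs = { outputData };
    model.Run(inputs, outputs);

    return 0;
}
```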
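The asynchronous path pairs CreateWorkingMemHandle() with the handle-based RunAsync() overload, so that concurrent callers each supply their own working memory. The sketch below continues from the example above; since this list does not show return types, the std::unique_ptr returned by CreateWorkingMemHandle() is an assumption mirroring armnn::IRuntime::CreateWorkingMemHandle.

```cpp
// Continues the synchronous sketch; assumes CreateWorkingMemHandle() returns
// a std::unique_ptr<armnn::experimental::IWorkingMemHandle>.
auto workingMemHandle = model.CreateWorkingMemHandle();

std::vector<Model::TContainer> asyncInputs  = { std::vector<float>(model.GetInputSize(), 0.0f) };
std::vector<Model::TContainer> asyncOutputs = { std::vector<float>(model.GetOutputSize()) };

// inferenceID is a caller-chosen tag identifying this request.
model.RunAsync(*workingMemHandle, asyncInputs, asyncOutputs, /*inferenceID=*/0u);
```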