ArmNN 21.11 — ExecuteNetwork.cpp File Reference
#include "NetworkExecutionUtils/NetworkExecutionUtils.hpp"
#include "ExecuteNetworkProgramOptions.hpp"
#include <armnn/IAsyncExecutionCallback.hpp>
#include <AsyncExecutionCallback.hpp>
#include <armnn/Logging.hpp>
#include <armnnUtils/Filesystem.hpp>
#include <armnnUtils/TContainer.hpp>
#include <InferenceTest.hpp>
#include <future>
Go to the source code of this file.
Functions

bool CheckInferenceTimeThreshold (const std::chrono::duration< double, std::milli > &duration, const double &thresholdTime)
    Given a measured duration and a threshold time tell the user whether we succeeded or not. More...

template<typename TParser , typename TDataType >
int MainImpl (const ExecuteNetworkParams &params, const std::shared_ptr< armnn::IRuntime > &runtime = nullptr)

int main (int argc, const char *argv[])
bool CheckInferenceTimeThreshold (const std::chrono::duration< double, std::milli > &duration,
                                  const double &thresholdTime)

Given a measured duration and a threshold time tell the user whether we succeeded or not.

Parameters:
    duration       the measured inference duration.
    thresholdTime  the threshold time in milliseconds.

Definition at line 47 of file ExecuteNetwork.cpp.
References ARMNN_LOG, ExecuteNetworkParams::ArmNNTfLiteDelegate, armnn::GetTimeDuration(), armnn::GetTimeNow(), ExecuteNetworkParams::m_DontPrintOutputs, ExecuteNetworkParams::m_GenerateTensorData, ExecuteNetworkParams::m_InputNames, ExecuteNetworkParams::m_InputTensorDataFilePaths, ExecuteNetworkParams::m_InputTensorShapes, ExecuteNetworkParams::m_InputTypes, ExecuteNetworkParams::m_Iterations, ExecuteNetworkParams::m_ModelPath, ExecuteNetworkParams::m_OutputNames, ExecuteNetworkParams::m_OutputTensorFiles, ExecuteNetworkParams::m_OutputTypes, IRuntime::CreationOptions::m_ProfilingOptions, ExecuteNetworkParams::m_TfLiteExecutor, ExecuteNetworkParams::m_ThresholdTime, armnn::numeric_cast(), DelegateOptions::SetExternalProfilingParams(), armnnDelegate::TfLiteArmnnDelegateCreate(), and armnnDelegate::TfLiteArmnnDelegateDelete().
Referenced by MainImpl().
int main (int argc,
          const char *argv[])

Definition at line 846 of file ExecuteNetwork.cpp.
References ARMNN_LOG, ExecuteNetworkParams::ArmNNTfLiteDelegate, ExecuteNetworkParams::ArmNNTfLiteParser, armnn::ConfigureLogging(), IRuntime::Create(), armnn::Debug, armnn::Info, ExecuteNetworkParams::m_EnableProfiling, ProgramOptions::m_ExNetParams, ExecuteNetworkParams::m_ModelFormat, ExecuteNetworkParams::m_OutputDetailsOnlyToStdOut, ExecuteNetworkParams::m_OutputDetailsToStdOut, ProgramOptions::m_RuntimeOptions, ExecuteNetworkParams::m_TfLiteExecutor, ProgramOptions::ParseOptions(), and ExecuteNetworkParams::TfliteInterpreter.
int MainImpl (const ExecuteNetworkParams &params,
              const std::shared_ptr< armnn::IRuntime > &runtime = nullptr)

Definition at line 368 of file ExecuteNetwork.cpp.
References ARMNN_LOG, CheckInferenceTimeThreshold(), InferenceModel< IParser, TDataType >::CreateWorkingMemHandle(), armnn::Float32, InferenceModel< IParser, TDataType >::GetInputQuantizationParams(), InferenceModel< IParser, TDataType >::GetInputSize(), AsyncCallbackManager::GetNewCallback(), AsyncCallbackManager::GetNotifiedCallback(), InferenceModel< IParser, TDataType >::GetOutputBindingInfo(), InferenceModel< IParser, TDataType >::GetOutputBindingInfos(), InferenceModel< IParser, TDataType >::GetOutputSize(), armnn::GetTimeDuration(), armnn::GetTimeNow(), Params::m_AsyncEnabled, ExecuteNetworkParams::m_CachedNetworkFilePath, Params::m_CachedNetworkFilePath, ExecuteNetworkParams::m_ComputeDevices, Params::m_ComputeDevices, ExecuteNetworkParams::m_Concurrent, ExecuteNetworkParams::m_DequantizeOutput, ExecuteNetworkParams::m_DontPrintOutputs, ExecuteNetworkParams::m_DynamicBackendsPath, Params::m_DynamicBackendsPath, ExecuteNetworkParams::m_EnableBf16TurboMode, Params::m_EnableBf16TurboMode, ExecuteNetworkParams::m_EnableFastMath, Params::m_EnableFastMath, ExecuteNetworkParams::m_EnableFp16TurboMode, Params::m_EnableFp16TurboMode, ExecuteNetworkParams::m_EnableLayerDetails, ExecuteNetworkParams::m_EnableProfiling, ExecuteNetworkParams::m_GenerateTensorData, ExecuteNetworkParams::m_InferOutputShape, Params::m_InferOutputShape, Params::m_InputBindings, ExecuteNetworkParams::m_InputNames, Params::m_InputShapes, ExecuteNetworkParams::m_InputTensorDataFilePaths, ExecuteNetworkParams::m_InputTensorShapes, ExecuteNetworkParams::m_InputTypes, ExecuteNetworkParams::m_IsModelBinary, Params::m_IsModelBinary, ExecuteNetworkParams::m_Iterations, ExecuteNetworkParams::m_MLGOTuningFilePath, Params::m_MLGOTuningFilePath, ExecuteNetworkParams::m_ModelPath, Params::m_ModelPath, ExecuteNetworkParams::m_NumberOfThreads, Params::m_NumberOfThreads, Params::m_OutputBindings, ExecuteNetworkParams::m_OutputDetailsOnlyToStdOut, Params::m_OutputDetailsOnlyToStdOut, 
ExecuteNetworkParams::m_OutputDetailsToStdOut, Params::m_OutputDetailsToStdOut, ExecuteNetworkParams::m_OutputNames, ExecuteNetworkParams::m_OutputTensorFiles, ExecuteNetworkParams::m_OutputTypes, ExecuteNetworkParams::m_ParseUnsupported, Params::m_ParseUnsupported, ExecuteNetworkParams::m_PrintIntermediate, Params::m_PrintIntermediateLayers, ExecuteNetworkParams::m_QuantizeInput, ExecuteNetworkParams::m_SaveCachedNetwork, Params::m_SaveCachedNetwork, ExecuteNetworkParams::m_SubgraphId, Params::m_SubgraphId, ExecuteNetworkParams::m_ThreadPoolSize, Params::m_ThreadPoolSize, ExecuteNetworkParams::m_ThresholdTime, Params::m_VisualizePostOptimizationModel, PopulateTensorWithData(), armnn::QAsymmS8, armnn::QAsymmU8, InferenceModel< IParser, TDataType >::Run(), InferenceModel< IParser, TDataType >::RunAsync(), armnn::Signed32, and Exception::what().