23.05
|
#include <INetwork.hpp>
Definition at line 862 of file INetwork.hpp.
◆ IOptimizedNetwork() [1/4]
◆ IOptimizedNetwork() [2/4]
◆ IOptimizedNetwork() [3/4]
◆ ~IOptimizedNetwork()
◆ IOptimizedNetwork() [4/4]
◆ Destroy()
◆ ExecuteStrategy()
void ExecuteStrategy(IStrategy &strategy) const
◆ GetGuid()
arm::pipe::ProfilingGuid GetGuid() const
◆ GetNumInputs()
size_t GetNumInputs() const
◆ GetNumOutputs()
size_t GetNumOutputs() const
◆ GetProfiler()
const std::shared_ptr< IProfiler > & GetProfiler() const
◆ PrintGraph()
◆ SerializeToDot()
Status SerializeToDot(std::ostream &stream) const
◆ experimental::AsyncNetworkImpl
friend class experimental::AsyncNetworkImpl
◆ experimental::WorkingMemHandle
◆ GetGraphForTesting
Definition at line 49 of file TestUtils.cpp.
51 return optNet->pOptimizedNetworkImpl->GetGraph();
◆ GetModelOptionsForTesting
Definition at line 54 of file TestUtils.cpp.
56 return optNet->pOptimizedNetworkImpl->GetModelOptions();
◆ LoadedNetwork
◆ Optimize [1/2]
Create an optimized version of the network.
- Parameters
-
inGraph | Graph to be optimized. |
backendPreferences | The choice of the backend ordered by user preferences. |
deviceSpec | DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec() |
messages | If there are failures or warnings a string describing same will be added to the vector |
options | OptimizerOptions object with optimizer configuration options |
- Returns
- An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from armnn::Exception if process fails.
Definition at line 1786 of file Network.cpp.
1795 auto profiler = inGraph.GetProfiler();
1797 profiler->EnableProfiling(options.GetProfilingEnabled());
1800 if (backendPreferences.empty())
1802 throw InvalidArgumentException(
"Invoked Optimize with no backends specified");
1805 if (options.GetReduceFp32ToBf16())
1807 throw InvalidArgumentException(
"BFloat16 optimization is currently ignored. In order to use Bf16 optimization "
1808 "Please use the FastMathEnabled backend option for CpuAcc or GpuAcc.");
1811 if (options.GetReduceFp32ToFp16() && options.GetReduceFp32ToBf16())
1813 throw InvalidArgumentException(
"BFloat16 and Float16 optimization cannot be enabled at the same time.");
1817 inGraph.VerifyConstantLayerSetTensorInfo();
1819 std::unique_ptr<Graph> graph = std::make_unique<Graph>(inGraph);
1824 {{
"ImportEnabled", options.GetImportEnabled()},
1825 {
"ExportEnabled", options.GetExportEnabled()}});
1826 ModelOptions optimizedOptions(options.GetModelOptions());
1827 optimizedOptions.push_back(importExport);
1835 Graph& optGraph = optNetObjPtr->pOptimizedNetworkImpl->GetGraph();
1840 optGraph.InferTensorInfos();
1844 using namespace optimizations;
1850 optGraph.InferTensorInfos();
1883 if (options.GetReduceFp32ToFp16())
1891 BackendSettings backendSettings(backendPreferences, deviceSpec);
1892 if (backendSettings.GetAvailablePreferredBackends().empty())
1894 std::stringstream failureMsg;
1895 failureMsg <<
"None of the preferred backends " << backendPreferences
1896 <<
" are supported. Current platform provides " << backendSettings.m_SupportedBackends;
1898 throw InvalidArgumentException(failureMsg.str());
1902 TensorHandleFactoryRegistry tensorHandleFactoryRegistry;
1908 OptimizationResult assignBackendsResult =
AssignBackends(optNetObjPtr->pOptimizedNetworkImpl.get(),
1913 if (assignBackendsResult.m_Error)
1916 throw InvalidArgumentException(
"Failed to assign a backend to each layer");
1926 options.GetModelOptions(),
1928 if (backendOptimizationResult.m_Error)
1931 throw InvalidArgumentException(
"Failed to apply the backend-specific optimizations");
1944 if (options.GetDebugEnabled() && !options.GetDebugToFileEnabled())
1948 else if (options.GetDebugToFileEnabled())
1953 #if !defined(ARMNN_DISABLE_FILESYSTEM)
1955 ARMNN_LOG(info) <<
"Intermediate tensors will be written to: " << result;
1962 ARMNN_LOG(warning) <<
"Unable to print intermediate layer outputs : " << e.
what();
1969 tensorHandleFactoryRegistry,
1970 options.GetImportEnabled(),
1971 options.GetExportEnabled(),
1974 if (strategyResult.m_Error)
1983 optGraph.AddCompatibilityLayers(backends, tensorHandleFactoryRegistry);
◆ Optimize [2/2]
Create an optimized version of the network.
- Parameters
-
network | INetwork description of the network to be optimized. |
backendPreferences | The choice of the backend ordered by user preferences. |
deviceSpec | DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec() |
messages | If there are failures or warnings a string describing same will be added to the vector |
options | OptimizerOptions object with optimizer configuration options |
- Returns
- An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from armnn::Exception if process fails.
Definition at line 2003 of file Network.cpp.
2009 return Optimize(inNetwork.pNetworkImpl->GetGraph(),
◆ pOptimizedNetworkImpl
The documentation for this class was generated from the following files:
OptimizeForType< Layer, AddDebugToFileImpl > InsertDebugToFileLayer
OptimizeForConnection< ConvertFp16ToFp32Layer, ConvertFp32ToFp16Layer, OptimizeInverseConversionsImpl > OptimizeInverseConversionsFp16
OptimizationResult SelectTensorHandleStrategy(Graph &optGraph, BackendsMap &backends, TensorHandleFactoryRegistry ®istry, bool importEnabled, bool exportEnabled, Optional< std::vector< std::string > & > errMessages)
LayerList::const_iterator Iterator
OptimizeForExclusiveConnection< PadLayer, Convolution2dLayer, pad_fold::FoldPadIntoConvolution2dImpl > FoldPadIntoConvolution2d
OptimizeForConnection< ConstantLayer, PermuteLayer, ConvertConstPermuteLayersToConstLayers > FusePermuteIntoConstLayer
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
OptimizeForConnection< Layer, TransposeLayer, MoveTransposeUpImpl > MoveTransposeUp
friend IOptimizedNetworkPtr Optimize(const INetwork &inNetwork, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options, Optional< std::vector< std::string > & > messages)
Create an optimized version of the network.
static void Pass(Graph &graph, const Optimizations &optimizations)
std::string CreateDirectory(std::string sPath)
Returns full path to temporary folder.
std::map< BackendId, std::unique_ptr< class IBackendInternal > > BackendsMap
@ ValidateOnly
Validate all output shapes.
OptimizeForConnection< ConvertFp32ToFp16Layer, ConvertFp16ToFp32Layer, OptimizeInverseConversionsImpl > OptimizeInverseConversionsFp32
OptimizationResult ApplyBackendOptimizations(OptimizedNetworkImpl *optNetObjPtr, BackendSettings &backendSettings, BackendsMap &backends, const ModelOptions &modelOptions, Optional< std::vector< std::string > & > errMessages)
OptimizeForConnection< TransposeLayer, TransposeLayer, OptimizeInversePermutesImpl< TransposeLayer > > OptimizeInverseTransposes
Struct for the users to pass backend specific options.
std::vector< BackendOptions > ModelOptions
OptimizeForConnection< Layer, ReshapeLayer, SquashEqualSiblingsImpl< ReshapeLayer > > SquashEqualReshapeSiblings
virtual const char * what() const noexcept override
OptimizeForType< Layer, AddBroadcastReshapeLayerImpl > AddBroadcastReshapeLayer
BackendsMap CreateSupportedBackends(TensorHandleFactoryRegistry &handleFactoryRegistry, BackendSettings &backendSettings)
Optimizer::Optimizations MakeOptimizations(Args &&... args)
OptimizeForConnection< Layer, PermuteLayer, MovePermuteUpImpl > MovePermuteUp
#define ARMNN_LOG(severity)
ConvertConstants< Float32ToFloat16, IsFloat16Layer > ConvertConstantsFloatToHalf
OptimizeForConnection< Layer, TransposeLayer, SquashEqualSiblingsImpl< TransposeLayer > > SquashEqualTransposeSiblings
static void Destroy(IOptimizedNetwork *network)
void ReportError(const std::string &errorMessage, Optional< std::vector< std::string > & > errorMessages)
std::unique_ptr< OptimizedNetworkImpl > pOptimizedNetworkImpl
#define ARMNN_SCOPED_PROFILING_EVENT(backendId, name)
OptimizeForType< Layer, ConvertFp32NetworkToFp16Impl > Fp32NetworkToFp16Converter
static ProfilerManager & GetInstance()
IOptimizedNetwork(const IOptimizedNetwork &other, const ModelOptions &modelOptions)
Creates a copy of the IOptimizedNetwork.
OptimizeForType< Layer, AddDebugImpl > InsertDebugLayer
OptimizeForExclusiveConnection< DepthwiseConvolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< DepthwiseConvolution2dLayer, armnn::DataType::Float16 > > FuseBatchNormIntoDepthwiseConvolution2DFloat16
OptimizeForType< TransposeLayer, TransposeAsReshapeImpl > TransposeAsReshape
OptimizeForExclusiveConnection< PadLayer, DepthwiseConvolution2dLayer, pad_fold::FoldPadIntoDepthwiseConvolution2dImpl > FoldPadIntoDepthwiseConvolution2d
OptimizeForConnection< PermuteLayer, BatchToSpaceNdLayer, PermuteAndBatchToSpaceAsDepthToSpaceImpl< PermuteLayer > > PermuteAndBatchToSpaceAsDepthToSpace
OptimizeForConnection< ConstantLayer, DequantizeLayer, ConvertConstDequantisationLayersToConstLayersImpl > ConvertConstDequantisationLayersToConstLayers
OptimizeForConnection< PermuteLayer, PermuteLayer, OptimizeInversePermutesImpl< PermuteLayer > > OptimizeInversePermutes
OptimizeForExclusiveConnection< Convolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< Convolution2dLayer, armnn::DataType::Float16 > > FuseBatchNormIntoConvolution2DFloat16
OptimizeForExclusiveConnection< PadLayer, Pooling2dLayer, pad_fold::FoldPadIntoPooling2dImpl > FoldPadIntoPooling2d
OptimizeForExclusiveConnection< Convolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< Convolution2dLayer, armnn::DataType::Float32 > > FuseBatchNormIntoConvolution2DFloat32
OptimizeForConnection< Layer, PermuteLayer, SquashEqualSiblingsImpl< PermuteLayer > > SquashEqualPermuteSiblings
OptimizeForExclusiveConnection< DepthwiseConvolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< DepthwiseConvolution2dLayer, armnn::DataType::Float32 > > FuseBatchNormIntoDepthwiseConvolution2DFloat32
ConvertConstants< Float16ToFloat32, IsFloat32Layer > ConvertConstantsHalfToFloat
OptimizationResult AssignBackends(OptimizedNetworkImpl *optNetObjPtr, BackendSettings &backendSettings, Graph::Iterator &firstLayer, Graph::Iterator &lastLayer, Optional< std::vector< std::string > & > errMessages)
OptimizeForType< PermuteLayer, PermuteAsReshapeImpl > PermuteAsReshape
OptimizeForConnection< ReshapeLayer, ReshapeLayer, OptimizeConsecutiveReshapesImpl > OptimizeConsecutiveReshapes
OptimizeForConnection< TransposeLayer, BatchToSpaceNdLayer, PermuteAndBatchToSpaceAsDepthToSpaceImpl< TransposeLayer > > TransposeAndBatchToSpaceAsDepthToSpace
void RegisterProfiler(IProfiler *profiler)
@ InferAndValidate
Infer missing output shapes and validate all output shapes.