ArmNN
 23.11
IOptimizedNetwork Class Reference

#include <INetwork.hpp>

Public Member Functions

Status PrintGraph ()
 
Status SerializeToDot (std::ostream &stream) const
 
arm::pipe::ProfilingGuid GetGuid () const
 
size_t GetNumInputs () const
 
size_t GetNumOutputs () const
 
void ExecuteStrategy (IStrategy &strategy) const
 
 IOptimizedNetwork (const IOptimizedNetwork &other, const ModelOptions &modelOptions)
 Creates a copy of the IOptimizedNetwork. More...
 
 IOptimizedNetwork (std::unique_ptr< Graph > graph)
 
 IOptimizedNetwork (std::unique_ptr< OptimizedNetworkImpl > impl)
 
 ~IOptimizedNetwork ()
 
const std::shared_ptr< IProfiler > & GetProfiler () const
 

Static Public Member Functions

static void Destroy (IOptimizedNetwork *network)
 

Protected Member Functions

 IOptimizedNetwork (std::unique_ptr< Graph > graph, const ModelOptions &modelOptions)
 

Protected Attributes

std::unique_ptr< OptimizedNetworkImpl > pOptimizedNetworkImpl
 

Friends

class LoadedNetwork
 
class experimental::AsyncNetworkImpl
 
class experimental::WorkingMemHandle
 
Graph & GetGraphForTesting (IOptimizedNetwork *optNetPtr)
 
ModelOptions & GetModelOptionsForTesting (IOptimizedNetwork *optNetPtr)
 
IOptimizedNetworkPtr Optimize (const INetwork &inNetwork, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options, Optional< std::vector< std::string > & > messages)
 Create an optimized version of the network. More...
 
IOptimizedNetworkPtr Optimize (const Graph &inGraph, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options, Optional< std::vector< std::string > & > messages)
 Create an optimized version of the network. More...
 

Detailed Description

Definition at line 901 of file INetwork.hpp.

Constructor & Destructor Documentation

◆ IOptimizedNetwork() [1/4]

IOptimizedNetwork ( const IOptimizedNetwork & other,
const ModelOptions & modelOptions 
)

Creates a copy of the IOptimizedNetwork.

The IOptimizedNetwork will not be reoptimized, the provided ModelOptions will only be used when creating a LoadedNetwork.

Definition at line 686 of file Network.cpp.

687  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(*other.pOptimizedNetworkImpl.get(), modelOptions)) {}

◆ IOptimizedNetwork() [2/4]

IOptimizedNetwork ( std::unique_ptr< Graph > graph)

Definition at line 689 of file Network.cpp.

690  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(std::move(graph))) {}

◆ IOptimizedNetwork() [3/4]

IOptimizedNetwork ( std::unique_ptr< OptimizedNetworkImpl > impl)

Definition at line 692 of file Network.cpp.

693  : pOptimizedNetworkImpl(std::move(impl)) {}

◆ ~IOptimizedNetwork()

~IOptimizedNetwork ( )
default

◆ IOptimizedNetwork() [4/4]

IOptimizedNetwork ( std::unique_ptr< Graph > graph,
const ModelOptions & modelOptions 
)
protected

Definition at line 695 of file Network.cpp.

696  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(std::move(graph), modelOptions)) {}

Member Function Documentation

◆ Destroy()

void Destroy ( IOptimizedNetwork * network)
static

Definition at line 700 of file Network.cpp.

701 {
702  delete network;
703 }

Referenced by armnn::Optimize().

◆ ExecuteStrategy()

void ExecuteStrategy ( IStrategy & strategy) const

Definition at line 3106 of file Network.cpp.

3107 {
3108  pOptimizedNetworkImpl->ExecuteStrategy(strategy);
3109 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetGuid()

arm::pipe::ProfilingGuid GetGuid ( ) const

Definition at line 720 of file Network.cpp.

721 {
722  return pOptimizedNetworkImpl->GetGuid();
723 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetNumInputs()

size_t GetNumInputs ( ) const

Definition at line 725 of file Network.cpp.

726 {
727  return pOptimizedNetworkImpl->GetNumInputs();
728 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetNumOutputs()

size_t GetNumOutputs ( ) const

Definition at line 730 of file Network.cpp.

731 {
732  return pOptimizedNetworkImpl->GetNumOutputs();
733 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetProfiler()

const std::shared_ptr< IProfiler > & GetProfiler ( ) const

Definition at line 715 of file Network.cpp.

716 {
717  return pOptimizedNetworkImpl->GetGraph().GetProfiler();
718 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ PrintGraph()

Status PrintGraph ( )

Definition at line 705 of file Network.cpp.

706 {
707  return pOptimizedNetworkImpl->PrintGraph();
708 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ SerializeToDot()

Status SerializeToDot ( std::ostream &  stream) const

Definition at line 710 of file Network.cpp.

711 {
712  return pOptimizedNetworkImpl->SerializeToDot(stream);
713 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

Referenced by armnn_driver::ExportNetworkGraphToDotFile().

Friends And Related Function Documentation

◆ experimental::AsyncNetworkImpl

friend class experimental::AsyncNetworkImpl
friend

Definition at line 928 of file INetwork.hpp.

◆ experimental::WorkingMemHandle

friend class experimental::WorkingMemHandle
friend

Definition at line 929 of file INetwork.hpp.

◆ GetGraphForTesting

Graph& GetGraphForTesting ( IOptimizedNetwork * optNetPtr)
friend

Definition at line 49 of file TestUtils.cpp.

50 {
51  return optNet->pOptimizedNetworkImpl->GetGraph();
52 }

◆ GetModelOptionsForTesting

ModelOptions& GetModelOptionsForTesting ( IOptimizedNetwork * optNetPtr)
friend

Definition at line 54 of file TestUtils.cpp.

55 {
56  return optNet->pOptimizedNetworkImpl->GetModelOptions();
57 }

◆ LoadedNetwork

friend class LoadedNetwork
friend

Definition at line 926 of file INetwork.hpp.

◆ Optimize [1/2]

IOptimizedNetworkPtr Optimize ( const Graph & inGraph,
const std::vector< BackendId > &  backendPreferences,
const IDeviceSpec & deviceSpec,
const OptimizerOptionsOpaque & options,
Optional< std::vector< std::string > & >  messages = EmptyOptional() 
)
friend

Create an optimized version of the network.

Parameters
inGraph — Graph to be optimized.
backendPreferences — The choice of the backend ordered by user preferences.
deviceSpec — DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec()
messages — If there are failures or warnings a string describing same will be added to the vector
options — OptimizerOptions object with optimizer configuration options
Returns
An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from armnn::Exception if process fails.

Definition at line 1896 of file Network.cpp.

1901 {
1902  ARMNN_LOG(debug) << options.ToString();
1903 
1904  // Enable profiling
1905  auto profiler = inGraph.GetProfiler();
1907  profiler->EnableProfiling(options.GetProfilingEnabled());
1908 
1910  if (backendPreferences.empty())
1911  {
1912  throw InvalidArgumentException("Invoked Optimize with no backends specified");
1913  }
1914 
1915  if (options.GetReduceFp32ToBf16())
1916  {
1917  throw InvalidArgumentException("BFloat16 optimization is currently ignored. In order to use Bf16 optimization "
1918  "Please use the FastMathEnabled backend option for CpuAcc or GpuAcc.");
1919  }
1920 
1921  if (options.GetReduceFp32ToFp16() && options.GetReduceFp32ToBf16())
1922  {
1923  throw InvalidArgumentException("BFloat16 and Float16 optimization cannot be enabled at the same time.");
1924  }
1925 
1926  // Ensure TensorInfo is set on all output slots of ConstantLayers in the graph
1927  inGraph.VerifyConstantLayerSetTensorInfo();
1928 
1929  std::unique_ptr<Graph> graph = std::make_unique<Graph>(inGraph);
1930 
1931  // We need to pass on the information about whether import and export is enabled to the LoadNetwork phase.
1932  // The mechanism to do that is to add model options to the optimized network.
1933  armnn::BackendOptions importExport("Global",
1934  {{"ImportEnabled", options.GetImportEnabled()},
1935  {"ExportEnabled", options.GetExportEnabled()}});
1936  ModelOptions optimizedOptions(options.GetModelOptions());
1937  optimizedOptions.push_back(importExport);
1938 
1939  auto optNet = IOptimizedNetworkPtr(new IOptimizedNetwork(std::move(graph), optimizedOptions),
1941 
1942  IOptimizedNetwork* optNetObjPtr = optNet.get();
1943 
1944  // Get the optimized graph
1945  Graph& optGraph = optNetObjPtr->pOptimizedNetworkImpl->GetGraph();
1946 
1947  if(options.GetShapeInferenceMethod() == ShapeInferenceMethod::InferAndValidate)
1948  {
1949  // Infer the tensor infos for all output slots. Throws an exception on failure
1950  optGraph.InferTensorInfos();
1951  }
1952 
1953  // Perform BroadcastToOptimizationLayer and then AddBroadcastReshapeLayer optimisation
1954  using namespace optimizations;
1956 
1958 
1959  if(options.GetShapeInferenceMethod() == ShapeInferenceMethod::ValidateOnly)
1960  {
1961  // Validate the tensor infos for all output slots. Throws an exception on failure
1962  optGraph.InferTensorInfos();
1963  }
1964 
1965 
1966  // Group Constant Layer optimizations together where possible.
1967  // This is important as:
1968  // FusePermuteIntoConstantLayer must happen before FoldPadIntoDepthwiseConvolution2d and
1969  // FuseBatchNormIntoDepthwiseConvolution2D.
1970  // ConvertConstDequantisationLayersToConstLayers must happen before FoldPadIntoConvolution2d
1973  // Perform optimisation passes
1979  MovePermuteUp(),
1980  MoveTransposeUp(),
1981  PermuteAsReshape(),
1994 
1995 
1996  // Initialize backend settings
1997  BackendSettings backendSettings(backendPreferences, deviceSpec);
1998  auto availablePreferredBackends = backendSettings.GetAvailablePreferredBackends();
1999  if (availablePreferredBackends.empty())
2000  {
2001  std::stringstream failureMsg;
2002  failureMsg << "None of the preferred backends " << backendPreferences
2003  << " are supported. Current platform provides " << backendSettings.m_SupportedBackends;
2004  ReportError(failureMsg.str(), messages);
2005  throw InvalidArgumentException(failureMsg.str());
2006  }
2007 
2008  // Create a map to temporarily hold initialized backend objects
2009  TensorHandleFactoryRegistry tensorHandleFactoryRegistry;
2010  BackendsMap backends = CreateSupportedBackends(tensorHandleFactoryRegistry, backendSettings);
2011 
2012  if (options.GetReduceFp32ToFp16())
2013  {
2014  bool hasFp16 = CheckFp16Support(backends, availablePreferredBackends);
2015  if (hasFp16)
2016  {
2017  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_ReduceFp32ToFp16");
2020  }
2021  }
2022 
2023  // Assign an available backend to each layer
2024  Graph::Iterator firstLayer = optGraph.begin();
2025  Graph::Iterator lastLayer = optGraph.end();
2026  OptimizationResult assignBackendsResult = AssignBackends(optNetObjPtr->pOptimizedNetworkImpl.get(),
2027  backendSettings,
2028  firstLayer,
2029  lastLayer,
2030  messages);
2031  if (assignBackendsResult.m_Error)
2032  {
2033  // Failed to assign a backend to each layer
2034  throw InvalidArgumentException("Failed to assign a backend to each layer");
2035  }
2036 
2039 
2040  // Apply the backend-specific optimizations
2041  OptimizationResult backendOptimizationResult = ApplyBackendOptimizations(optNetObjPtr->pOptimizedNetworkImpl.get(),
2042  backendSettings,
2043  backends,
2044  options.GetModelOptions(),
2045  messages);
2046  if (backendOptimizationResult.m_Error)
2047  {
2048  // Failed to apply the backend-specific optimizations
2049  throw InvalidArgumentException("Failed to apply the backend-specific optimizations");
2050  }
2051 
2052  // Convert constants
2053  {
2054  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_ConvertConstants");
2057  }
2058 
2059  // This must occur after all topological changes to the graph and any redirection of variables
2060  // If the debug flag is set, then insert a DebugLayer after each layer
2061  // Doing this after applying the backend optimizations as they might have changed some layers
2062  if (options.GetDebugEnabled() && !options.GetDebugToFileEnabled())
2063  {
2065  }
2066  else if (options.GetDebugToFileEnabled())
2067  {
2068  // Setup the output file path
2069  try
2070  {
2071 #if !defined(ARMNN_DISABLE_FILESYSTEM)
2072  auto result = armnnUtils::Filesystem::CreateDirectory("/ArmNNIntermediateLayerOutputs");
2073  ARMNN_LOG(info) << "Intermediate tensors will be written to: " << result;
2074 #endif
2076  }
2077  catch (const armnn::RuntimeException& e)
2078  {
2079  // If we cannot create the output directory then we'll issue a warning and continue.
2080  ARMNN_LOG(warning) << "Unable to print intermediate layer outputs : " << e.what();
2081  }
2082  }
2083 
2084  // Calculate the compatibility strategies for tensor handles
2085  OptimizationResult strategyResult = SelectTensorHandleStrategy(optGraph,
2086  backends,
2087  tensorHandleFactoryRegistry,
2088  options.GetImportEnabled(),
2089  options.GetExportEnabled(),
2090  messages);
2091 
2092  if (strategyResult.m_Error)
2093  {
2094  // Failed to apply the backend-specific optimizations
2096  }
2097 
2098  // Based on the tensor handle strategy determined above, insert copy layers where required.
2099  {
2100  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_AddCompatibilityLayers");
2101  optGraph.AddCompatibilityLayers(backends, tensorHandleFactoryRegistry);
2102  }
2103 
2104  return optNet;
2105 }

◆ Optimize [2/2]

IOptimizedNetworkPtr Optimize ( const INetwork & inNetwork,
const std::vector< BackendId > &  backendPreferences,
const IDeviceSpec & deviceSpec,
const OptimizerOptionsOpaque & options = OptimizerOptionsOpaque(),
Optional< std::vector< std::string > & >  messages = EmptyOptional() 
)
friend

Create an optimized version of the network.

Parameters
inNetwork — INetwork description of the network to be optimized.
backendPreferences — The choice of the backend ordered by user preferences.
deviceSpec — DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec()
messages — If there are failures or warnings a string describing same will be added to the vector
options — OptimizerOptions object with optimizer configuration options
Returns
An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from armnn::Exception if process fails.

Definition at line 2121 of file Network.cpp.

2126 {
2127  return Optimize(inNetwork.pNetworkImpl->GetGraph(),
2128  backendPreferences,
2129  deviceSpec,
2130  options,
2131  messages);
2132 }

Member Data Documentation

◆ pOptimizedNetworkImpl


The documentation for this class was generated from the following files:
armnn::optimizations::InsertDebugToFileLayer
OptimizeForType< Layer, AddDebugToFileImpl > InsertDebugToFileLayer
Definition: AddDebug.hpp:54
armnn::IOptimizedNetworkPtr
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:340
armnn::ApplyBackendOptimizations
OptimizationResult ApplyBackendOptimizations(OptimizedNetworkImpl *optNetObjPtr, BackendSettings &backendSettings, BackendsMap &backends, const ModelOptions &modelOptions, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:1328
armnn::Compute::Undefined
@ Undefined
armnn::optimizations::InsertDebugLayer
OptimizeForType< Layer, AddDebugImpl > InsertDebugLayer
Definition: AddDebug.hpp:53
armnn::optimizations::FuseBatchNormIntoConvolution2DFloat32
OptimizeForExclusiveConnection< Convolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< Convolution2dLayer, armnn::DataType::Float32 > > FuseBatchNormIntoConvolution2DFloat32
Definition: FuseBatchNorm.hpp:222
armnn::ProfilerManager::RegisterProfiler
void RegisterProfiler(IProfiler *profiler)
Definition: Profiling.cpp:600
armnn::optimizations::OptimizeInversePermutes
OptimizeForConnection< PermuteLayer, PermuteLayer, OptimizeInversePermutesImpl< PermuteLayer > > OptimizeInversePermutes
Definition: OptimizeInversePermutes.hpp:43
armnn::optimizations::TransposeAndBatchToSpaceAsDepthToSpace
OptimizeForConnection< TransposeLayer, BatchToSpaceNdLayer, PermuteAndBatchToSpaceAsDepthToSpaceImpl< TransposeLayer > > TransposeAndBatchToSpaceAsDepthToSpace
Definition: PermuteAndBatchToSpaceAsDepthToSpace.hpp:104
armnn::optimizations::FoldPadIntoPooling2d
OptimizeForExclusiveConnection< PadLayer, Pooling2dLayer, pad_fold::FoldPadIntoPooling2dImpl > FoldPadIntoPooling2d
Definition: FoldPadIntoLayer2d.hpp:260
armnn::optimizations::Fp32NetworkToFp16Converter
OptimizeForType< Layer, ConvertFp32NetworkToFp16Impl > Fp32NetworkToFp16Converter
Definition: ConvertFp32NetworkToFp16.hpp:87
armnn::optimizations::FoldPadIntoConvolution2d
OptimizeForExclusiveConnection< PadLayer, Convolution2dLayer, pad_fold::FoldPadIntoConvolution2dImpl > FoldPadIntoConvolution2d
Definition: FoldPadIntoLayer2d.hpp:254
armnn::optimizations::ConvertConstDequantisationLayersToConstLayers
OptimizeForConnection< ConstantLayer, DequantizeLayer, ConvertConstDequantisationLayersToConstLayersImpl > ConvertConstDequantisationLayersToConstLayers
Definition: ConvertConstDequantisationLayersToConstLayers.hpp:173
armnn::optimizations::MoveTransposeUp
OptimizeForConnection< Layer, TransposeLayer, MoveTransposeUpImpl > MoveTransposeUp
Definition: MoveTransposeUp.hpp:83
armnn::AssignBackends
OptimizationResult AssignBackends(OptimizedNetworkImpl *optNetObjPtr, BackendSettings &backendSettings, Graph::Iterator &firstLayer, Graph::Iterator &lastLayer, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:1186
armnn::optimizations::BroadcastToOptimizationLayer
OptimizeForType< BroadcastToLayer, DeleteBroadcastToImpl > BroadcastToOptimizationLayer
Definition: DeleteBroadcastTo.hpp:38
armnn::Graph::Iterator
LayerList::const_iterator Iterator
Definition: Graph.hpp:53
armnn::optimizations::PermuteAsReshape
OptimizeForType< PermuteLayer, PermuteAsReshapeImpl > PermuteAsReshape
Definition: PermuteAsReshape.hpp:66
armnn::optimizations::PermuteAndBatchToSpaceAsDepthToSpace
OptimizeForConnection< PermuteLayer, BatchToSpaceNdLayer, PermuteAndBatchToSpaceAsDepthToSpaceImpl< PermuteLayer > > PermuteAndBatchToSpaceAsDepthToSpace
Definition: PermuteAndBatchToSpaceAsDepthToSpace.hpp:102
armnn::SelectTensorHandleStrategy
OptimizationResult SelectTensorHandleStrategy(Graph &optGraph, BackendsMap &backends, TensorHandleFactoryRegistry &registry, bool importEnabled, bool exportEnabled, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:1812
armnn::optimizations::MovePermuteUp
OptimizeForConnection< Layer, PermuteLayer, MovePermuteUpImpl > MovePermuteUp
Definition: MovePermuteUp.hpp:83
armnn::optimizations::OptimizeInverseConversionsFp16
OptimizeForConnection< ConvertFp16ToFp32Layer, ConvertFp32ToFp16Layer, OptimizeInverseConversionsImpl > OptimizeInverseConversionsFp16
Definition: OptimizeInverseConversions.hpp:42
armnn::Exception::what
virtual const char * what() const noexcept override
Definition: Exceptions.cpp:32
ARMNN_LOG
#define ARMNN_LOG(severity)
Definition: Logging.hpp:212
armnn::CreateSupportedBackends
BackendsMap CreateSupportedBackends(TensorHandleFactoryRegistry &handleFactoryRegistry, BackendSettings &backendSettings)
Definition: Network.cpp:1309
armnn::optimizations::ConvertConstantsFloatToHalf
ConvertConstants< Float32ToFloat16, IsFloat16Layer > ConvertConstantsFloatToHalf
Definition: ConvertConstants.hpp:99
ARMNN_SCOPED_PROFILING_EVENT
#define ARMNN_SCOPED_PROFILING_EVENT(backendId, name)
Definition: Profiling.hpp:220
armnn::ReportError
void ReportError(const std::string &errorMessage, Optional< std::vector< std::string > & > errorMessages)
Definition: Network.cpp:756
armnn::CheckFp16Support
bool CheckFp16Support(BackendsMap &backends, const std::vector< BackendId > &availablePreferredBackends)
Definition: Network.cpp:1029
armnn::optimizations::FusePermuteIntoConstLayer
OptimizeForConnection< ConstantLayer, PermuteLayer, ConvertConstPermuteLayersToConstLayers > FusePermuteIntoConstLayer
Definition: ConvertConstPermuteLayersToConstLayers.hpp:124
armnn::MakeOptimizations
Optimizer::Optimizations MakeOptimizations(Args &&... args)
Definition: Optimizer.hpp:43
armnn::RuntimeException
Definition: Exceptions.hpp:120
armnn::IOptimizedNetwork::Optimize
friend IOptimizedNetworkPtr Optimize(const INetwork &inNetwork, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options, Optional< std::vector< std::string > & > messages)
Create an optimized version of the network.
Definition: Network.cpp:2121
armnn::ShapeInferenceMethod::ValidateOnly
@ ValidateOnly
Validate all output shapes.
armnn::ShapeInferenceMethod::InferAndValidate
@ InferAndValidate
Infer missing output shapes and validate all output shapes.
armnn::optimizations::OptimizeInverseConversionsFp32
OptimizeForConnection< ConvertFp32ToFp16Layer, ConvertFp16ToFp32Layer, OptimizeInverseConversionsImpl > OptimizeInverseConversionsFp32
Definition: OptimizeInverseConversions.hpp:44
armnn::BackendOptions
Struct for the users to pass backend specific options.
Definition: BackendOptions.hpp:22
armnn::optimizations::TransposeAsReshape
OptimizeForType< TransposeLayer, TransposeAsReshapeImpl > TransposeAsReshape
Definition: TransposeAsReshape.hpp:77
armnn::ProfilerManager::GetInstance
static ProfilerManager & GetInstance()
Definition: Profiling.cpp:593
armnn::optimizations::FuseBatchNormIntoConvolution2DFloat16
OptimizeForExclusiveConnection< Convolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< Convolution2dLayer, armnn::DataType::Float16 > > FuseBatchNormIntoConvolution2DFloat16
Definition: FuseBatchNorm.hpp:227
armnn::IOptimizedNetwork::Destroy
static void Destroy(IOptimizedNetwork *network)
Definition: Network.cpp:700
armnn::optimizations::FuseBatchNormIntoDepthwiseConvolution2DFloat16
OptimizeForExclusiveConnection< DepthwiseConvolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< DepthwiseConvolution2dLayer, armnn::DataType::Float16 > > FuseBatchNormIntoDepthwiseConvolution2DFloat16
Definition: FuseBatchNorm.hpp:237
armnn::BackendsMap
std::map< BackendId, std::unique_ptr< class IBackendInternal > > BackendsMap
Definition: Network.hpp:282
armnn::optimizations::SquashEqualTransposeSiblings
OptimizeForConnection< Layer, TransposeLayer, SquashEqualSiblingsImpl< TransposeLayer > > SquashEqualTransposeSiblings
Definition: SquashEqualSiblings.hpp:69
armnn::optimizations::ConvertConstantsHalfToFloat
ConvertConstants< Float16ToFloat32, IsFloat32Layer > ConvertConstantsHalfToFloat
Definition: ConvertConstants.hpp:98
armnn::IOptimizedNetwork::IOptimizedNetwork
IOptimizedNetwork(const IOptimizedNetwork &other, const ModelOptions &modelOptions)
Creates a copy of the IOptimizedNetwork.
Definition: Network.cpp:686
armnn::optimizations::SquashEqualPermuteSiblings
OptimizeForConnection< Layer, PermuteLayer, SquashEqualSiblingsImpl< PermuteLayer > > SquashEqualPermuteSiblings
Definition: SquashEqualSiblings.hpp:67
armnn::Optimizer::Pass
static void Pass(Graph &graph, const Optimizations &optimizations)
Definition: Optimizer.cpp:16
armnn::ModelOptions
std::vector< BackendOptions > ModelOptions
Definition: BackendOptions.hpp:18
armnn::optimizations::FoldPadIntoDepthwiseConvolution2d
OptimizeForExclusiveConnection< PadLayer, DepthwiseConvolution2dLayer, pad_fold::FoldPadIntoDepthwiseConvolution2dImpl > FoldPadIntoDepthwiseConvolution2d
Definition: FoldPadIntoLayer2d.hpp:258
armnnUtils::Filesystem::CreateDirectory
std::string CreateDirectory(std::string sPath)
Returns full path to temporary folder.
Definition: Filesystem.cpp:47
armnn::optimizations::SquashEqualReshapeSiblings
OptimizeForConnection< Layer, ReshapeLayer, SquashEqualSiblingsImpl< ReshapeLayer > > SquashEqualReshapeSiblings
Definition: SquashEqualSiblings.hpp:70
armnn::optimizations::OptimizeInverseTransposes
OptimizeForConnection< TransposeLayer, TransposeLayer, OptimizeInversePermutesImpl< TransposeLayer > > OptimizeInverseTransposes
Definition: OptimizeInversePermutes.hpp:45
armnn::optimizations::AddBroadcastReshapeLayer
OptimizeForType< Layer, AddBroadcastReshapeLayerImpl > AddBroadcastReshapeLayer
Definition: AddBroadcastReshapeLayer.hpp:94
armnn::optimizations::FuseBatchNormIntoDepthwiseConvolution2DFloat32
OptimizeForExclusiveConnection< DepthwiseConvolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< DepthwiseConvolution2dLayer, armnn::DataType::Float32 > > FuseBatchNormIntoDepthwiseConvolution2DFloat32
Definition: FuseBatchNorm.hpp:232
armnn::optimizations::OptimizeConsecutiveReshapes
OptimizeForConnection< ReshapeLayer, ReshapeLayer, OptimizeConsecutiveReshapesImpl > OptimizeConsecutiveReshapes
Definition: OptimizeConsecutiveReshapes.hpp:61
armnn::IOptimizedNetwork::pOptimizedNetworkImpl
std::unique_ptr< OptimizedNetworkImpl > pOptimizedNetworkImpl
Definition: INetwork.hpp:946