ArmNN
 24.02
IOptimizedNetwork Class Reference

#include <INetwork.hpp>

Public Member Functions

Status PrintGraph ()
 
Status SerializeToDot (std::ostream &stream) const
 
arm::pipe::ProfilingGuid GetGuid () const
 
size_t GetNumInputs () const
 
size_t GetNumOutputs () const
 
void ExecuteStrategy (IStrategy &strategy) const
 
 IOptimizedNetwork (const IOptimizedNetwork &other, const ModelOptions &modelOptions)
 Creates a copy of the IOptimizedNetwork. More...
 
 IOptimizedNetwork (std::unique_ptr< Graph > graph)
 
 IOptimizedNetwork (std::unique_ptr< OptimizedNetworkImpl > impl)
 
 ~IOptimizedNetwork ()
 
const std::shared_ptr< IProfiler > & GetProfiler () const
 

Static Public Member Functions

static void Destroy (IOptimizedNetwork *network)
 

Protected Member Functions

 IOptimizedNetwork (std::unique_ptr< Graph > graph, const ModelOptions &modelOptions)
 

Protected Attributes

std::unique_ptr< OptimizedNetworkImpl > pOptimizedNetworkImpl
 

Friends

class LoadedNetwork
 
class experimental::AsyncNetworkImpl
 
class experimental::WorkingMemHandle
 
Graph & GetGraphForTesting (IOptimizedNetwork *optNetPtr)
 
ModelOptions & GetModelOptionsForTesting (IOptimizedNetwork *optNetPtr)
 
IOptimizedNetworkPtr Optimize (const INetwork &inNetwork, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options, Optional< std::vector< std::string > & > messages)
 Create an optimized version of the network. More...
 
IOptimizedNetworkPtr Optimize (const Graph &inGraph, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options, Optional< std::vector< std::string > & > messages)
 Create an optimized version of the network. More...
 

Detailed Description

Definition at line 901 of file INetwork.hpp.

Constructor & Destructor Documentation

◆ IOptimizedNetwork() [1/4]

IOptimizedNetwork ( const IOptimizedNetwork &  other,
const ModelOptions &  modelOptions 
)

Creates a copy of the IOptimizedNetwork.

The IOptimizedNetwork will not be reoptimized, the provided ModelOptions will only be used when creating a LoadedNetwork.

Definition at line 686 of file Network.cpp.

687  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(*other.pOptimizedNetworkImpl.get(), modelOptions)) {}

◆ IOptimizedNetwork() [2/4]

IOptimizedNetwork ( std::unique_ptr< Graph >  graph)

Definition at line 689 of file Network.cpp.

690  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(std::move(graph))) {}

◆ IOptimizedNetwork() [3/4]

IOptimizedNetwork ( std::unique_ptr< OptimizedNetworkImpl >  impl)

Definition at line 692 of file Network.cpp.

693  : pOptimizedNetworkImpl(std::move(impl)) {}

◆ ~IOptimizedNetwork()

~IOptimizedNetwork ( )
default

◆ IOptimizedNetwork() [4/4]

IOptimizedNetwork ( std::unique_ptr< Graph >  graph,
const ModelOptions &  modelOptions 
)
protected

Definition at line 695 of file Network.cpp.

696  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(std::move(graph), modelOptions)) {}

Member Function Documentation

◆ Destroy()

void Destroy ( IOptimizedNetwork *  network)
static

Definition at line 700 of file Network.cpp.

701 {
702  delete network;
703 }

Referenced by armnn::Optimize().

◆ ExecuteStrategy()

void ExecuteStrategy ( IStrategy &  strategy) const

Definition at line 3117 of file Network.cpp.

3118 {
3119  pOptimizedNetworkImpl->ExecuteStrategy(strategy);
3120 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetGuid()

arm::pipe::ProfilingGuid GetGuid ( ) const

Definition at line 720 of file Network.cpp.

721 {
722  return pOptimizedNetworkImpl->GetGuid();
723 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetNumInputs()

size_t GetNumInputs ( ) const

Definition at line 725 of file Network.cpp.

726 {
727  return pOptimizedNetworkImpl->GetNumInputs();
728 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetNumOutputs()

size_t GetNumOutputs ( ) const

Definition at line 730 of file Network.cpp.

731 {
732  return pOptimizedNetworkImpl->GetNumOutputs();
733 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetProfiler()

const std::shared_ptr< IProfiler > & GetProfiler ( ) const

Definition at line 715 of file Network.cpp.

716 {
717  return pOptimizedNetworkImpl->GetGraph().GetProfiler();
718 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ PrintGraph()

Status PrintGraph ( )

Definition at line 705 of file Network.cpp.

706 {
707  return pOptimizedNetworkImpl->PrintGraph();
708 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ SerializeToDot()

Status SerializeToDot ( std::ostream &  stream) const

Definition at line 710 of file Network.cpp.

711 {
712  return pOptimizedNetworkImpl->SerializeToDot(stream);
713 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

Referenced by armnn_driver::ExportNetworkGraphToDotFile().

Friends And Related Function Documentation

◆ experimental::AsyncNetworkImpl

friend class experimental::AsyncNetworkImpl
friend

Definition at line 928 of file INetwork.hpp.

◆ experimental::WorkingMemHandle

friend class experimental::WorkingMemHandle
friend

Definition at line 929 of file INetwork.hpp.

◆ GetGraphForTesting

Graph& GetGraphForTesting ( IOptimizedNetwork *  optNetPtr)
friend

Definition at line 49 of file TestUtils.cpp.

50 {
51  return optNet->pOptimizedNetworkImpl->GetGraph();
52 }

◆ GetModelOptionsForTesting

ModelOptions& GetModelOptionsForTesting ( IOptimizedNetwork *  optNetPtr)
friend

Definition at line 54 of file TestUtils.cpp.

55 {
56  return optNet->pOptimizedNetworkImpl->GetModelOptions();
57 }

◆ LoadedNetwork

friend class LoadedNetwork
friend

Definition at line 926 of file INetwork.hpp.

◆ Optimize [1/2]

IOptimizedNetworkPtr Optimize ( const Graph &  inGraph,
const std::vector< BackendId > &  backendPreferences,
const IDeviceSpec &  deviceSpec,
const OptimizerOptionsOpaque &  options,
Optional< std::vector< std::string > & >  messages = EmptyOptional() 
)
friend

Create an optimized version of the network.

Parameters
inGraph	Graph to be optimized.
backendPreferences	The choice of the backend ordered by user preferences.
deviceSpec	DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec()
messages	If there are failures or warnings a string describing same will be added to the vector
options	OptimizerOptions object with optimizer configuration options
Returns
An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from armnn::Exception if process fails.

Definition at line 1896 of file Network.cpp.

1901 {
1902  ARMNN_LOG(debug) << options.ToString();
1903 
1904  // Enable profiling
1905  auto profiler = inGraph.GetProfiler();
1907  profiler->EnableProfiling(options.GetProfilingEnabled());
1908 
1909  // Some backends don't play well together. Check here before continuing.
1910  {
1911  std::set<BackendId> backendSet(backendPreferences.begin(), backendPreferences.end());
1912  // GpuFsa cannot co-exist with GpuAcc.
1913  if (backendSet.find("GpuFsa") != backendSet.end() &&
1914  backendSet.find("GpuAcc") != backendSet.end())
1915  {
1916  throw InvalidArgumentException("The backends \"GpuAcc\" and \"GpuFsa\" cannot be specified "
1917  "for the same optimized network.");
1918  }
1919  }
1920 
1922  if (backendPreferences.empty())
1923  {
1924  throw InvalidArgumentException("Invoked Optimize with no backends specified");
1925  }
1926 
1927  if (options.GetReduceFp32ToBf16())
1928  {
1929  throw InvalidArgumentException("BFloat16 optimization is currently ignored. In order to use Bf16 optimization "
1930  "Please use the FastMathEnabled backend option for CpuAcc or GpuAcc.");
1931  }
1932 
1933  if (options.GetReduceFp32ToFp16() && options.GetReduceFp32ToBf16())
1934  {
1935  throw InvalidArgumentException("BFloat16 and Float16 optimization cannot be enabled at the same time.");
1936  }
1937 
1938  // Ensure TensorInfo is set on all output slots of ConstantLayers in the graph
1939  inGraph.VerifyConstantLayerSetTensorInfo();
1940 
1941  std::unique_ptr<Graph> graph = std::make_unique<Graph>(inGraph);
1942 
1943  // We need to pass on the information about whether import and export is enabled to the LoadNetwork phase.
1944  // The mechanism to do that is to add model options to the optimized network.
1945  armnn::BackendOptions importExport("Global",
1946  {{"ImportEnabled", options.GetImportEnabled()},
1947  {"ExportEnabled", options.GetExportEnabled()}});
1948  ModelOptions optimizedOptions(options.GetModelOptions());
1949  optimizedOptions.push_back(importExport);
1950 
1951  auto optNet = IOptimizedNetworkPtr(new IOptimizedNetwork(std::move(graph), optimizedOptions),
1953 
1954  IOptimizedNetwork* optNetObjPtr = optNet.get();
1955 
1956  // Get the optimized graph
1957  Graph& optGraph = optNetObjPtr->pOptimizedNetworkImpl->GetGraph();
1958 
1959  if(options.GetShapeInferenceMethod() == ShapeInferenceMethod::InferAndValidate)
1960  {
1961  // Infer the tensor infos for all output slots. Throws an exception on failure
1962  optGraph.InferTensorInfos();
1963  }
1964 
1965  // Perform BroadcastToOptimizationLayer and then AddBroadcastReshapeLayer optimisation
1966  using namespace optimizations;
1968 
1970 
1971  if(options.GetShapeInferenceMethod() == ShapeInferenceMethod::ValidateOnly)
1972  {
1973  // Validate the tensor infos for all output slots. Throws an exception on failure
1974  optGraph.InferTensorInfos();
1975  }
1976 
1977 
1978  // Group Constant Layer optimizations together where possible.
1979  // This is important as:
1980  // FusePermuteIntoConstantLayer must happen before FoldPadIntoDepthwiseConvolution2d and
1981  // FuseBatchNormIntoDepthwiseConvolution2D.
1982  // ConvertConstDequantisationLayersToConstLayers must happen before FoldPadIntoConvolution2d
1985  // Perform optimisation passes
1991  MovePermuteUp(),
1992  MoveTransposeUp(),
1993  PermuteAsReshape(),
2006 
2007  // Initialize backend settings
2008  BackendSettings backendSettings(backendPreferences, deviceSpec);
2009  auto availablePreferredBackends = backendSettings.GetAvailablePreferredBackends();
2010  if (availablePreferredBackends.empty())
2011  {
2012  std::stringstream failureMsg;
2013  failureMsg << "None of the preferred backends " << backendPreferences
2014  << " are supported. Current platform provides " << backendSettings.m_SupportedBackends;
2015  ReportError(failureMsg.str(), messages);
2016  throw InvalidArgumentException(failureMsg.str());
2017  }
2018 
2019  // Create a map to temporarily hold initialized backend objects
2020  TensorHandleFactoryRegistry tensorHandleFactoryRegistry;
2021  BackendsMap backends = CreateSupportedBackends(tensorHandleFactoryRegistry, backendSettings);
2022 
2023  if (options.GetReduceFp32ToFp16())
2024  {
2025  bool hasFp16 = CheckFp16Support(backends, availablePreferredBackends);
2026  if (hasFp16)
2027  {
2028  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_ReduceFp32ToFp16");
2031  }
2032  }
2033 
2034  // Assign an available backend to each layer
2035  Graph::Iterator firstLayer = optGraph.begin();
2036  Graph::Iterator lastLayer = optGraph.end();
2037  OptimizationResult assignBackendsResult = AssignBackends(optNetObjPtr->pOptimizedNetworkImpl.get(),
2038  backendSettings,
2039  firstLayer,
2040  lastLayer,
2041  messages);
2042  if (assignBackendsResult.m_Error)
2043  {
2044  // Failed to assign a backend to each layer
2045  throw InvalidArgumentException("Failed to assign a backend to each layer");
2046  }
2047 
2050 
2051  // Apply the backend-specific optimizations
2052  OptimizationResult backendOptimizationResult = ApplyBackendOptimizations(optNetObjPtr->pOptimizedNetworkImpl.get(),
2053  backendSettings,
2054  backends,
2055  options.GetModelOptions(),
2056  messages);
2057  if (backendOptimizationResult.m_Error)
2058  {
2059  // Failed to apply the backend-specific optimizations
2060  throw InvalidArgumentException("Failed to apply the backend-specific optimizations");
2061  }
2062 
2063  // Convert constants
2064  {
2065  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_ConvertConstants");
2068  }
2069 
2070  // This must occur after all topological changes to the graph and any redirection of variables
2071  // If the debug flag is set, then insert a DebugLayer after each layer
2072  // Doing this after applying the backend optimizations as they might have changed some layers
2073  if (options.GetDebugEnabled() && !options.GetDebugToFileEnabled())
2074  {
2076  }
2077  else if (options.GetDebugToFileEnabled())
2078  {
2079  // Setup the output file path
2080  try
2081  {
2082 #if !defined(ARMNN_DISABLE_FILESYSTEM)
2083  auto result = armnnUtils::Filesystem::CreateDirectory("/ArmNNIntermediateLayerOutputs");
2084  ARMNN_LOG(info) << "Intermediate tensors will be written to: " << result;
2085 #endif
2087  }
2088  catch (const armnn::RuntimeException& e)
2089  {
2090  // If we cannot create the output directory then we'll issue a warning and continue.
2091  ARMNN_LOG(warning) << "Unable to print intermediate layer outputs : " << e.what();
2092  }
2093  }
2094 
2095  // Calculate the compatibility strategies for tensor handles
2096  OptimizationResult strategyResult = SelectTensorHandleStrategy(optGraph,
2097  backends,
2098  tensorHandleFactoryRegistry,
2099  options.GetImportEnabled(),
2100  options.GetExportEnabled(),
2101  messages);
2102 
2103  if (strategyResult.m_Error)
2104  {
2105  // Failed to apply the backend-specific optimizations
2107  }
2108 
2109  // Based on the tensor handle strategy determined above, insert copy layers where required.
2110  {
2111  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_AddCompatibilityLayers");
2112  optGraph.AddCompatibilityLayers(backends, tensorHandleFactoryRegistry);
2113  }
2114 
2115  return optNet;
2116 }

◆ Optimize [2/2]

IOptimizedNetworkPtr Optimize ( const INetwork &  inNetwork,
const std::vector< BackendId > &  backendPreferences,
const IDeviceSpec &  deviceSpec,
const OptimizerOptionsOpaque &  options = OptimizerOptionsOpaque(),
Optional< std::vector< std::string > & >  messages = EmptyOptional() 
)
friend

Create an optimized version of the network.

Parameters
network	INetwork description of the network to be optimized.
backendPreferences	The choice of the backend ordered by user preferences.
deviceSpec	DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec()
messages	If there are failures or warnings a string describing same will be added to the vector
options	OptimizerOptions object with optimizer configuration options
Returns
An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from armnn::Exception if process fails.

Definition at line 2132 of file Network.cpp.

2137 {
2138  return Optimize(inNetwork.pNetworkImpl->GetGraph(),
2139  backendPreferences,
2140  deviceSpec,
2141  options,
2142  messages);
2143 }

Member Data Documentation

◆ pOptimizedNetworkImpl


The documentation for this class was generated from the following files:
armnn::optimizations::InsertDebugToFileLayer
OptimizeForType< Layer, AddDebugToFileImpl > InsertDebugToFileLayer
Definition: AddDebug.hpp:54
armnn::IOptimizedNetworkPtr
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:340
armnn::ApplyBackendOptimizations
OptimizationResult ApplyBackendOptimizations(OptimizedNetworkImpl *optNetObjPtr, BackendSettings &backendSettings, BackendsMap &backends, const ModelOptions &modelOptions, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:1328
armnn::Compute::Undefined
@ Undefined
armnn::optimizations::InsertDebugLayer
OptimizeForType< Layer, AddDebugImpl > InsertDebugLayer
Definition: AddDebug.hpp:53
armnn::optimizations::FuseBatchNormIntoConvolution2DFloat32
OptimizeForExclusiveConnection< Convolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< Convolution2dLayer, armnn::DataType::Float32 > > FuseBatchNormIntoConvolution2DFloat32
Definition: FuseBatchNorm.hpp:222
armnn::ProfilerManager::RegisterProfiler
void RegisterProfiler(IProfiler *profiler)
Definition: Profiling.cpp:600
armnn::optimizations::OptimizeInversePermutes
OptimizeForConnection< PermuteLayer, PermuteLayer, OptimizeInversePermutesImpl< PermuteLayer > > OptimizeInversePermutes
Definition: OptimizeInversePermutes.hpp:43
armnn::optimizations::TransposeAndBatchToSpaceAsDepthToSpace
OptimizeForConnection< TransposeLayer, BatchToSpaceNdLayer, PermuteAndBatchToSpaceAsDepthToSpaceImpl< TransposeLayer > > TransposeAndBatchToSpaceAsDepthToSpace
Definition: PermuteAndBatchToSpaceAsDepthToSpace.hpp:104
armnn::optimizations::FoldPadIntoPooling2d
OptimizeForExclusiveConnection< PadLayer, Pooling2dLayer, pad_fold::FoldPadIntoPooling2dImpl > FoldPadIntoPooling2d
Definition: FoldPadIntoLayer2d.hpp:283
armnn::optimizations::Fp32NetworkToFp16Converter
OptimizeForType< Layer, ConvertFp32NetworkToFp16Impl > Fp32NetworkToFp16Converter
Definition: ConvertFp32NetworkToFp16.hpp:87
armnn::optimizations::FoldPadIntoConvolution2d
OptimizeForExclusiveConnection< PadLayer, Convolution2dLayer, pad_fold::FoldPadIntoConvolution2dImpl > FoldPadIntoConvolution2d
Definition: FoldPadIntoLayer2d.hpp:277
armnn::optimizations::ConvertConstDequantisationLayersToConstLayers
OptimizeForConnection< ConstantLayer, DequantizeLayer, ConvertConstDequantisationLayersToConstLayersImpl > ConvertConstDequantisationLayersToConstLayers
Definition: ConvertConstDequantisationLayersToConstLayers.hpp:173
armnn::optimizations::MoveTransposeUp
OptimizeForConnection< Layer, TransposeLayer, MoveTransposeUpImpl > MoveTransposeUp
Definition: MoveTransposeUp.hpp:83
armnn::AssignBackends
OptimizationResult AssignBackends(OptimizedNetworkImpl *optNetObjPtr, BackendSettings &backendSettings, Graph::Iterator &firstLayer, Graph::Iterator &lastLayer, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:1186
armnn::optimizations::BroadcastToOptimizationLayer
OptimizeForType< BroadcastToLayer, DeleteBroadcastToImpl > BroadcastToOptimizationLayer
Definition: DeleteBroadcastTo.hpp:38
armnn::Graph::Iterator
LayerList::const_iterator Iterator
Definition: Graph.hpp:53
armnn::optimizations::PermuteAsReshape
OptimizeForType< PermuteLayer, PermuteAsReshapeImpl > PermuteAsReshape
Definition: PermuteAsReshape.hpp:66
armnn::optimizations::PermuteAndBatchToSpaceAsDepthToSpace
OptimizeForConnection< PermuteLayer, BatchToSpaceNdLayer, PermuteAndBatchToSpaceAsDepthToSpaceImpl< PermuteLayer > > PermuteAndBatchToSpaceAsDepthToSpace
Definition: PermuteAndBatchToSpaceAsDepthToSpace.hpp:102
armnn::SelectTensorHandleStrategy
OptimizationResult SelectTensorHandleStrategy(Graph &optGraph, BackendsMap &backends, TensorHandleFactoryRegistry &registry, bool importEnabled, bool exportEnabled, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:1812
armnn::optimizations::MovePermuteUp
OptimizeForConnection< Layer, PermuteLayer, MovePermuteUpImpl > MovePermuteUp
Definition: MovePermuteUp.hpp:83
armnn::optimizations::OptimizeInverseConversionsFp16
OptimizeForConnection< ConvertFp16ToFp32Layer, ConvertFp32ToFp16Layer, OptimizeInverseConversionsImpl > OptimizeInverseConversionsFp16
Definition: OptimizeInverseConversions.hpp:42
armnn::Exception::what
virtual const char * what() const noexcept override
Definition: Exceptions.cpp:32
ARMNN_LOG
#define ARMNN_LOG(severity)
Definition: Logging.hpp:212
armnn::CreateSupportedBackends
BackendsMap CreateSupportedBackends(TensorHandleFactoryRegistry &handleFactoryRegistry, BackendSettings &backendSettings)
Definition: Network.cpp:1309
armnn::optimizations::ConvertConstantsFloatToHalf
ConvertConstants< Float32ToFloat16, IsFloat16Layer > ConvertConstantsFloatToHalf
Definition: ConvertConstants.hpp:99
ARMNN_SCOPED_PROFILING_EVENT
#define ARMNN_SCOPED_PROFILING_EVENT(backendId, name)
Definition: Profiling.hpp:220
armnn::ReportError
void ReportError(const std::string &errorMessage, Optional< std::vector< std::string > & > errorMessages)
Definition: Network.cpp:756
armnn::CheckFp16Support
bool CheckFp16Support(BackendsMap &backends, const std::vector< BackendId > &availablePreferredBackends)
Definition: Network.cpp:1029
armnn::optimizations::FusePermuteIntoConstLayer
OptimizeForConnection< ConstantLayer, PermuteLayer, ConvertConstPermuteLayersToConstLayers > FusePermuteIntoConstLayer
Definition: ConvertConstPermuteLayersToConstLayers.hpp:124
armnn::MakeOptimizations
Optimizer::Optimizations MakeOptimizations(Args &&... args)
Definition: Optimizer.hpp:43
armnn::RuntimeException
Definition: Exceptions.hpp:120
armnn::IOptimizedNetwork::Optimize
friend IOptimizedNetworkPtr Optimize(const INetwork &inNetwork, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptionsOpaque &options, Optional< std::vector< std::string > & > messages)
Create an optimized version of the network.
Definition: Network.cpp:2132
armnn::ShapeInferenceMethod::ValidateOnly
@ ValidateOnly
Validate all output shapes.
armnn::ShapeInferenceMethod::InferAndValidate
@ InferAndValidate
Infer missing output shapes and validate all output shapes.
armnn::optimizations::OptimizeInverseConversionsFp32
OptimizeForConnection< ConvertFp32ToFp16Layer, ConvertFp16ToFp32Layer, OptimizeInverseConversionsImpl > OptimizeInverseConversionsFp32
Definition: OptimizeInverseConversions.hpp:44
armnn::BackendOptions
Struct for the users to pass backend specific options.
Definition: BackendOptions.hpp:22
armnn::optimizations::TransposeAsReshape
OptimizeForType< TransposeLayer, TransposeAsReshapeImpl > TransposeAsReshape
Definition: TransposeAsReshape.hpp:77
armnn::ProfilerManager::GetInstance
static ProfilerManager & GetInstance()
Definition: Profiling.cpp:593
armnn::optimizations::FuseBatchNormIntoConvolution2DFloat16
OptimizeForExclusiveConnection< Convolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< Convolution2dLayer, armnn::DataType::Float16 > > FuseBatchNormIntoConvolution2DFloat16
Definition: FuseBatchNorm.hpp:227
armnn::IOptimizedNetwork::Destroy
static void Destroy(IOptimizedNetwork *network)
Definition: Network.cpp:700
armnn::optimizations::FuseBatchNormIntoDepthwiseConvolution2DFloat16
OptimizeForExclusiveConnection< DepthwiseConvolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< DepthwiseConvolution2dLayer, armnn::DataType::Float16 > > FuseBatchNormIntoDepthwiseConvolution2DFloat16
Definition: FuseBatchNorm.hpp:237
armnn::BackendsMap
std::map< BackendId, std::unique_ptr< class IBackendInternal > > BackendsMap
Definition: Network.hpp:282
armnn::optimizations::SquashEqualTransposeSiblings
OptimizeForConnection< Layer, TransposeLayer, SquashEqualSiblingsImpl< TransposeLayer > > SquashEqualTransposeSiblings
Definition: SquashEqualSiblings.hpp:69
armnn::optimizations::ConvertConstantsHalfToFloat
ConvertConstants< Float16ToFloat32, IsFloat32Layer > ConvertConstantsHalfToFloat
Definition: ConvertConstants.hpp:98
armnn::IOptimizedNetwork::IOptimizedNetwork
IOptimizedNetwork(const IOptimizedNetwork &other, const ModelOptions &modelOptions)
Creates a copy of the IOptimizedNetwork.
Definition: Network.cpp:686
armnn::optimizations::SquashEqualPermuteSiblings
OptimizeForConnection< Layer, PermuteLayer, SquashEqualSiblingsImpl< PermuteLayer > > SquashEqualPermuteSiblings
Definition: SquashEqualSiblings.hpp:67
armnn::Optimizer::Pass
static void Pass(Graph &graph, const Optimizations &optimizations)
Definition: Optimizer.cpp:16
armnn::ModelOptions
std::vector< BackendOptions > ModelOptions
Definition: BackendOptions.hpp:18
armnn::optimizations::FoldPadIntoDepthwiseConvolution2d
OptimizeForExclusiveConnection< PadLayer, DepthwiseConvolution2dLayer, pad_fold::FoldPadIntoDepthwiseConvolution2dImpl > FoldPadIntoDepthwiseConvolution2d
Definition: FoldPadIntoLayer2d.hpp:281
armnnUtils::Filesystem::CreateDirectory
std::string CreateDirectory(std::string sPath)
Returns full path to temporary folder.
Definition: Filesystem.cpp:47
armnn::optimizations::SquashEqualReshapeSiblings
OptimizeForConnection< Layer, ReshapeLayer, SquashEqualSiblingsImpl< ReshapeLayer > > SquashEqualReshapeSiblings
Definition: SquashEqualSiblings.hpp:70
armnn::optimizations::OptimizeInverseTransposes
OptimizeForConnection< TransposeLayer, TransposeLayer, OptimizeInversePermutesImpl< TransposeLayer > > OptimizeInverseTransposes
Definition: OptimizeInversePermutes.hpp:45
armnn::optimizations::AddBroadcastReshapeLayer
OptimizeForType< Layer, AddBroadcastReshapeLayerImpl > AddBroadcastReshapeLayer
Definition: AddBroadcastReshapeLayer.hpp:94
armnn::optimizations::FuseBatchNormIntoDepthwiseConvolution2DFloat32
OptimizeForExclusiveConnection< DepthwiseConvolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< DepthwiseConvolution2dLayer, armnn::DataType::Float32 > > FuseBatchNormIntoDepthwiseConvolution2DFloat32
Definition: FuseBatchNorm.hpp:232
armnn::optimizations::OptimizeConsecutiveReshapes
OptimizeForConnection< ReshapeLayer, ReshapeLayer, OptimizeConsecutiveReshapesImpl > OptimizeConsecutiveReshapes
Definition: OptimizeConsecutiveReshapes.hpp:61
armnn::IOptimizedNetwork::pOptimizedNetworkImpl
std::unique_ptr< OptimizedNetworkImpl > pOptimizedNetworkImpl
Definition: INetwork.hpp:946