ArmNN
 23.02
IOptimizedNetwork Class Reference

#include <INetwork.hpp>

Public Member Functions

Status PrintGraph ()
 
Status SerializeToDot (std::ostream &stream) const
 
arm::pipe::ProfilingGuid GetGuid () const
 
size_t GetNumInputs () const
 
size_t GetNumOutputs () const
 
void ExecuteStrategy (IStrategy &strategy) const
 
 IOptimizedNetwork (const IOptimizedNetwork &other, const ModelOptions &modelOptions)
 Creates a copy of the IOptimizedNetwork. More...
 
 IOptimizedNetwork (std::unique_ptr< Graph > graph)
 
 IOptimizedNetwork (std::unique_ptr< OptimizedNetworkImpl > impl)
 
 ~IOptimizedNetwork ()
 
const std::shared_ptr< IProfiler > & GetProfiler () const
 

Static Public Member Functions

static void Destroy (IOptimizedNetwork *network)
 

Protected Member Functions

 IOptimizedNetwork (std::unique_ptr< Graph > graph, const ModelOptions &modelOptions)
 

Protected Attributes

std::unique_ptr< OptimizedNetworkImpl > pOptimizedNetworkImpl
 

Friends

class LoadedNetwork
 
class experimental::AsyncNetworkImpl
 
class experimental::WorkingMemHandle
 
Graph & GetGraphForTesting (IOptimizedNetwork *optNetPtr)
 
ModelOptions & GetModelOptionsForTesting (IOptimizedNetwork *optNetPtr)
 
IOptimizedNetworkPtr Optimize (const INetwork &inNetwork, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options, Optional< std::vector< std::string > & > messages)
 Create an optimized version of the network. More...
 
IOptimizedNetworkPtr Optimize (const Graph &inGraph, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options, Optional< std::vector< std::string > & > messages)
 Create an optimized version of the network. More...
 

Detailed Description

Definition at line 769 of file INetwork.hpp.

Constructor & Destructor Documentation

◆ IOptimizedNetwork() [1/4]

IOptimizedNetwork ( const IOptimizedNetwork &  other,
const ModelOptions &  modelOptions 
)

Creates a copy of the IOptimizedNetwork.

The IOptimizedNetwork will not be reoptimized, the provided ModelOptions will only be used when creating a LoadedNetwork.

Definition at line 462 of file Network.cpp.

463  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(*other.pOptimizedNetworkImpl.get(), modelOptions)) {}

◆ IOptimizedNetwork() [2/4]

IOptimizedNetwork ( std::unique_ptr< Graph >  graph)

Definition at line 465 of file Network.cpp.

466  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(std::move(graph))) {}

◆ IOptimizedNetwork() [3/4]

IOptimizedNetwork ( std::unique_ptr< OptimizedNetworkImpl >  impl)

Definition at line 468 of file Network.cpp.

469  : pOptimizedNetworkImpl(std::move(impl)) {}

◆ ~IOptimizedNetwork()

~IOptimizedNetwork ( )
default

◆ IOptimizedNetwork() [4/4]

IOptimizedNetwork ( std::unique_ptr< Graph >  graph,
const ModelOptions &  modelOptions 
)
protected

Definition at line 471 of file Network.cpp.

472  : pOptimizedNetworkImpl(new OptimizedNetworkImpl(std::move(graph), modelOptions)) {}

Member Function Documentation

◆ Destroy()

void Destroy ( IOptimizedNetwork *  network)
static

Definition at line 476 of file Network.cpp.

477 {
478  delete network;
479 }

Referenced by armnn::Optimize().

◆ ExecuteStrategy()

void ExecuteStrategy ( IStrategy &  strategy) const

Definition at line 2731 of file Network.cpp.

2732 {
2733  pOptimizedNetworkImpl->ExecuteStrategy(strategy);
2734 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetGuid()

arm::pipe::ProfilingGuid GetGuid ( ) const

Definition at line 496 of file Network.cpp.

497 {
498  return pOptimizedNetworkImpl->GetGuid();
499 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetNumInputs()

size_t GetNumInputs ( ) const

Definition at line 501 of file Network.cpp.

502 {
503  return pOptimizedNetworkImpl->GetNumInputs();
504 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetNumOutputs()

size_t GetNumOutputs ( ) const

Definition at line 506 of file Network.cpp.

507 {
508  return pOptimizedNetworkImpl->GetNumOutputs();
509 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ GetProfiler()

const std::shared_ptr< IProfiler > & GetProfiler ( ) const

Definition at line 491 of file Network.cpp.

492 {
493  return pOptimizedNetworkImpl->GetGraph().GetProfiler();
494 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ PrintGraph()

Status PrintGraph ( )

Definition at line 481 of file Network.cpp.

482 {
483  return pOptimizedNetworkImpl->PrintGraph();
484 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

◆ SerializeToDot()

Status SerializeToDot ( std::ostream &  stream) const

Definition at line 486 of file Network.cpp.

487 {
488  return pOptimizedNetworkImpl->SerializeToDot(stream);
489 }

References IOptimizedNetwork::pOptimizedNetworkImpl.

Referenced by armnn_driver::ExportNetworkGraphToDotFile().

Friends And Related Function Documentation

◆ experimental::AsyncNetworkImpl

friend class experimental::AsyncNetworkImpl
friend

Definition at line 796 of file INetwork.hpp.

◆ experimental::WorkingMemHandle

friend class experimental::WorkingMemHandle
friend

Definition at line 797 of file INetwork.hpp.

◆ GetGraphForTesting

Graph& GetGraphForTesting ( IOptimizedNetwork *  optNetPtr)
friend

Definition at line 49 of file TestUtils.cpp.

50 {
51  return optNet->pOptimizedNetworkImpl->GetGraph();
52 }

◆ GetModelOptionsForTesting

ModelOptions& GetModelOptionsForTesting ( IOptimizedNetwork *  optNetPtr)
friend

Definition at line 54 of file TestUtils.cpp.

55 {
56  return optNet->pOptimizedNetworkImpl->GetModelOptions();
57 }

◆ LoadedNetwork

friend class LoadedNetwork
friend

Definition at line 794 of file INetwork.hpp.

◆ Optimize [1/2]

IOptimizedNetworkPtr Optimize ( const Graph &  inGraph,
const std::vector< BackendId > &  backendPreferences,
const IDeviceSpec &  deviceSpec,
const OptimizerOptions &  options,
Optional< std::vector< std::string > & >  messages = EmptyOptional() 
)
friend

Create an optimized version of the network.

Parameters
inGraph	— Graph to be optimized.
backendPreferences	— The choice of the backend ordered by user preferences.
deviceSpec	— DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec()
messages	— If there are failures or warnings a string describing same will be added to the vector
options	— OptimizerOptions object with optimizer configuration options
Returns
An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from armnn::Exception if process fails.

Definition at line 1572 of file Network.cpp.

1577 {
1578  ARMNN_LOG(debug) << options.ToString();
1579 
1580  // Enable profiling
1581  auto profiler = inGraph.GetProfiler();
1583  profiler->EnableProfiling(options.m_ProfilingEnabled);
1584 
1586  if (backendPreferences.empty())
1587  {
1588  throw InvalidArgumentException("Invoked Optimize with no backends specified");
1589  }
1590 
1591  if (options.m_ReduceFp32ToBf16)
1592  {
1593  throw InvalidArgumentException("BFloat16 optimization is currently ignored. In order to use Bf16 optimization "
1594  "Please use the FastMathEnabled backend option for CpuAcc or GpuAcc.");
1595  }
1596 
1597  if (options.m_ReduceFp32ToFp16 && options.m_ReduceFp32ToBf16)
1598  {
1599  throw InvalidArgumentException("BFloat16 and Float16 optimization cannot be enabled at the same time.");
1600  }
1601 
1602  // Ensure TensorInfo is set on all output slots of ConstantLayers in the graph
1603  inGraph.VerifyConstantLayerSetTensorInfo();
1604 
1605  std::unique_ptr<Graph> graph = std::make_unique<Graph>(inGraph);
1606 
1607  // We need to pass on the information about whether import and export is enabled to the LoadNetwork phase.
1608  // The mechanism to do that is to add model options to the optimized network.
1609  armnn::BackendOptions importExport("Global",
1610  {{"ImportEnabled", options.m_ImportEnabled},
1611  {"ExportEnabled", options.m_ExportEnabled}});
1612  ModelOptions optimizedOptions(options.m_ModelOptions);
1613  optimizedOptions.push_back(importExport);
1614 
1615  auto optNet = IOptimizedNetworkPtr(new IOptimizedNetwork(std::move(graph), optimizedOptions),
1617 
1618  IOptimizedNetwork* optNetObjPtr = optNet.get();
1619 
1620  // Get the optimized graph
1621  Graph& optGraph = optNetObjPtr->pOptimizedNetworkImpl->GetGraph();
1622 
1623  if(options.m_shapeInferenceMethod == ShapeInferenceMethod::InferAndValidate)
1624  {
1625  // Infer the tensor infos for all output slots. Throws an exception on failure
1626  optGraph.InferTensorInfos();
1627  }
1628 
1629  // Perform AddBroadcastReshapeLayer optimisation
1630  using namespace optimizations;
1632 
1633  if(options.m_shapeInferenceMethod == ShapeInferenceMethod::ValidateOnly)
1634  {
1635  // Validate the tensor infos for all output slots. Throws an exception on failure
1636  optGraph.InferTensorInfos();
1637  }
1638 
1639 
1640  // Group Constant Layer optimizations together where possible.
1641  // This is important as:
1642  // FusePermuteIntoConstantLayer must happen before FoldPadIntoDepthwiseConvolution2d and
1643  // FuseBatchNormIntoDepthwiseConvolution2D.
1644  // ConvertConstDequantisationLayersToConstLayers must happen before FoldPadIntoConvolution2d
1647  // Perform optimisation passes
1653  MovePermuteUp(),
1654  MoveTransposeUp(),
1655  PermuteAsReshape(),
1667 
1668  // If Fp32 to Fp16 optimization is set convert Fp32 network to Fp16
1669  if (options.m_ReduceFp32ToFp16)
1670  {
1671  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_ReduceFp32ToFp16");
1674  }
1675 
1676  // Initialize backend settings
1677  BackendSettings backendSettings(backendPreferences, deviceSpec);
1678  if (backendSettings.GetAvailablePreferredBackends().empty())
1679  {
1680  std::stringstream failureMsg;
1681  failureMsg << "None of the preferred backends " << backendPreferences
1682  << " are supported. Current platform provides " << backendSettings.m_SupportedBackends;
1683  ReportError(failureMsg.str(), messages);
1684  throw InvalidArgumentException(failureMsg.str());
1685  }
1686 
1687  // Create a map to temporarily hold initialized backend objects
1688  TensorHandleFactoryRegistry tensorHandleFactoryRegistry;
1689  BackendsMap backends = CreateSupportedBackends(tensorHandleFactoryRegistry, backendSettings);
1690 
1691  // Assign an available backend to each layer
1692  Graph::Iterator firstLayer = optGraph.begin();
1693  Graph::Iterator lastLayer = optGraph.end();
1694  OptimizationResult assignBackendsResult = AssignBackends(optNetObjPtr->pOptimizedNetworkImpl.get(),
1695  backendSettings,
1696  firstLayer,
1697  lastLayer,
1698  messages);
1699  if (assignBackendsResult.m_Error)
1700  {
1701  // Failed to assign a backend to each layer
1702  throw InvalidArgumentException("Failed to assign a backend to each layer");
1703  }
1704 
1707 
1708  // Apply the backend-specific optimizations
1709  OptimizationResult backendOptimizationResult = ApplyBackendOptimizations(optNetObjPtr->pOptimizedNetworkImpl.get(),
1710  backendSettings,
1711  backends,
1712  options.m_ModelOptions,
1713  messages);
1714  if (backendOptimizationResult.m_Error)
1715  {
1716  // Failed to apply the backend-specific optimizations
1717  throw InvalidArgumentException("Failed to apply the backend-specific optimizations");
1718  }
1719 
1720  // Convert constants
1721  {
1722  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_ConvertConstants");
1725  }
1726 
1727  // This must occur after all topological changes to the graph and any redirection of variables
1728  // If the debug flag is set, then insert a DebugLayer after each layer
1729  // Doing this after applying the backend optimizations as they might have changed some layers
1730  if (options.m_Debug && !options.m_DebugToFile)
1731  {
1733  }
1734  else if (options.m_DebugToFile)
1735  {
1736  // Setup the output file path
1737  try
1738  {
1739  auto result = armnnUtils::Filesystem::CreateDirectory("/ArmNNIntermediateLayerOutputs");
1740  ARMNN_LOG(info) << "Intermediate tensors will be written to: " << result;
1742  }
1743  catch (const armnn::RuntimeException& e)
1744  {
1745  // If we cannot create the output directory then we'll issue a warning and continue.
1746  ARMNN_LOG(warning) << "Unable to print intermediate layer outputs : " << e.what();
1747  }
1748  }
1749 
1750  // Calculate the compatibility strategies for tensor handles
1751  OptimizationResult strategyResult = SelectTensorHandleStrategy(optGraph,
1752  backends,
1753  tensorHandleFactoryRegistry,
1754  options.m_ImportEnabled,
1755  options.m_ExportEnabled,
1756  messages);
1757 
1758  if (strategyResult.m_Error)
1759  {
1760  // Failed to apply the backend-specific optimizations
1762  }
1763 
1764  // Based on the tensor handle strategy determined above, insert copy layers where required.
1765  {
1766  ARMNN_SCOPED_PROFILING_EVENT(Compute::Undefined, "Optimizer_AddCompatibilityLayers");
1767  optGraph.AddCompatibilityLayers(backends, tensorHandleFactoryRegistry);
1768  }
1769 
1770  return optNet;
1771 }

◆ Optimize [2/2]

IOptimizedNetworkPtr Optimize ( const INetwork &  inNetwork,
const std::vector< BackendId > &  backendPreferences,
const IDeviceSpec &  deviceSpec,
const OptimizerOptions &  options = OptimizerOptions(),
Optional< std::vector< std::string > & >  messages = EmptyOptional() 
)
friend

Create an optimized version of the network.

Parameters
inNetwork	— INetwork description of the network to be optimized.
backendPreferences	— The choice of the backend ordered by user preferences.
deviceSpec	— DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec()
messages	— If there are failures or warnings a string describing same will be added to the vector
options	— OptimizerOptions object with optimizer configuration options
Returns
An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from armnn::Exception if process fails.

Definition at line 1773 of file Network.cpp.

1778 {
1779  return Optimize(inNetwork.pNetworkImpl->GetGraph(),
1780  backendPreferences,
1781  deviceSpec,
1782  options,
1783  messages);
1784 }

Member Data Documentation

◆ pOptimizedNetworkImpl


The documentation for this class was generated from the following files:
armnn::optimizations::InsertDebugToFileLayer
OptimizeForType< Layer, AddDebugToFileImpl > InsertDebugToFileLayer
Definition: AddDebug.hpp:54
armnn::optimizations::OptimizeInverseConversionsFp16
OptimizeForConnection< ConvertFp16ToFp32Layer, ConvertFp32ToFp16Layer, OptimizeInverseConversionsImpl > OptimizeInverseConversionsFp16
Definition: OptimizeInverseConversions.hpp:42
armnn::SelectTensorHandleStrategy
OptimizationResult SelectTensorHandleStrategy(Graph &optGraph, BackendsMap &backends, TensorHandleFactoryRegistry &registry, bool importEnabled, bool exportEnabled, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:1502
armnn::Graph::Iterator
LayerList::const_iterator Iterator
Definition: Graph.hpp:53
armnn::optimizations::FoldPadIntoConvolution2d
OptimizeForExclusiveConnection< PadLayer, Convolution2dLayer, pad_fold::FoldPadIntoConvolution2dImpl > FoldPadIntoConvolution2d
Definition: FoldPadIntoLayer2d.hpp:254
armnn::optimizations::FusePermuteIntoConstLayer
OptimizeForConnection< ConstantLayer, PermuteLayer, ConvertConstPermuteLayersToConstLayers > FusePermuteIntoConstLayer
Definition: ConvertConstPermuteLayersToConstLayers.hpp:124
armnn::IOptimizedNetworkPtr
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:253
armnn::optimizations::MoveTransposeUp
OptimizeForConnection< Layer, TransposeLayer, MoveTransposeUpImpl > MoveTransposeUp
Definition: MoveTransposeUp.hpp:77
armnn::Optimizer::Pass
static void Pass(Graph &graph, const Optimizations &optimizations)
Definition: Optimizer.cpp:16
armnnUtils::Filesystem::CreateDirectory
std::string CreateDirectory(std::string sPath)
Returns full path to temporary folder.
Definition: Filesystem.cpp:47
armnn::BackendsMap
std::map< BackendId, std::unique_ptr< class IBackendInternal > > BackendsMap
Definition: Network.hpp:262
armnn::ShapeInferenceMethod::ValidateOnly
@ ValidateOnly
Validate all output shapes.
armnn::optimizations::OptimizeInverseConversionsFp32
OptimizeForConnection< ConvertFp32ToFp16Layer, ConvertFp16ToFp32Layer, OptimizeInverseConversionsImpl > OptimizeInverseConversionsFp32
Definition: OptimizeInverseConversions.hpp:44
armnn::ApplyBackendOptimizations
OptimizationResult ApplyBackendOptimizations(OptimizedNetworkImpl *optNetObjPtr, BackendSettings &backendSettings, BackendsMap &backends, const ModelOptions &modelOptions, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:1040
armnn::optimizations::OptimizeInverseTransposes
OptimizeForConnection< TransposeLayer, TransposeLayer, OptimizeInversePermutesImpl< TransposeLayer > > OptimizeInverseTransposes
Definition: OptimizeInversePermutes.hpp:45
armnn::BackendOptions
Struct for the users to pass backend specific options.
Definition: BackendOptions.hpp:22
armnn::ModelOptions
std::vector< BackendOptions > ModelOptions
Definition: BackendOptions.hpp:18
armnn::optimizations::SquashEqualReshapeSiblings
OptimizeForConnection< Layer, ReshapeLayer, SquashEqualSiblingsImpl< ReshapeLayer > > SquashEqualReshapeSiblings
Definition: SquashEqualSiblings.hpp:70
armnn::Exception::what
virtual const char * what() const noexcept override
Definition: Exceptions.cpp:32
armnn::optimizations::AddBroadcastReshapeLayer
OptimizeForType< Layer, AddBroadcastReshapeLayerImpl > AddBroadcastReshapeLayer
Definition: AddBroadcastReshapeLayer.hpp:94
armnn::CreateSupportedBackends
BackendsMap CreateSupportedBackends(TensorHandleFactoryRegistry &handleFactoryRegistry, BackendSettings &backendSettings)
Definition: Network.cpp:1021
armnn::MakeOptimizations
Optimizer::Optimizations MakeOptimizations(Args &&... args)
Definition: Optimizer.hpp:43
armnn::optimizations::MovePermuteUp
OptimizeForConnection< Layer, PermuteLayer, MovePermuteUpImpl > MovePermuteUp
Definition: MovePermuteUp.hpp:77
ARMNN_LOG
#define ARMNN_LOG(severity)
Definition: Logging.hpp:212
armnn::optimizations::ConvertConstantsFloatToHalf
ConvertConstants< Float32ToFloat16, IsFloat16Layer > ConvertConstantsFloatToHalf
Definition: ConvertConstants.hpp:99
armnn::RuntimeException
Definition: Exceptions.hpp:120
armnn::optimizations::SquashEqualTransposeSiblings
OptimizeForConnection< Layer, TransposeLayer, SquashEqualSiblingsImpl< TransposeLayer > > SquashEqualTransposeSiblings
Definition: SquashEqualSiblings.hpp:69
armnn::IOptimizedNetwork::Destroy
static void Destroy(IOptimizedNetwork *network)
Definition: Network.cpp:476
armnn::ReportError
void ReportError(const std::string &errorMessage, Optional< std::vector< std::string > & > errorMessages)
Definition: Network.cpp:532
armnn::IOptimizedNetwork::pOptimizedNetworkImpl
std::unique_ptr< OptimizedNetworkImpl > pOptimizedNetworkImpl
Definition: INetwork.hpp:814
ARMNN_SCOPED_PROFILING_EVENT
#define ARMNN_SCOPED_PROFILING_EVENT(backendId, name)
Definition: Profiling.hpp:220
armnn::optimizations::Fp32NetworkToFp16Converter
OptimizeForType< Layer, ConvertFp32NetworkToFp16Impl > Fp32NetworkToFp16Converter
Definition: ConvertFp32NetworkToFp16.hpp:87
armnn::ProfilerManager::GetInstance
static ProfilerManager & GetInstance()
Definition: Profiling.cpp:593
armnn::IOptimizedNetwork::IOptimizedNetwork
IOptimizedNetwork(const IOptimizedNetwork &other, const ModelOptions &modelOptions)
Creates a copy of the IOptimizedNetwork.
Definition: Network.cpp:462
armnn::optimizations::InsertDebugLayer
OptimizeForType< Layer, AddDebugImpl > InsertDebugLayer
Definition: AddDebug.hpp:53
armnn::optimizations::FuseBatchNormIntoDepthwiseConvolution2DFloat16
OptimizeForExclusiveConnection< DepthwiseConvolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< DepthwiseConvolution2dLayer, armnn::DataType::Float16 > > FuseBatchNormIntoDepthwiseConvolution2DFloat16
Definition: FuseBatchNorm.hpp:237
armnn::optimizations::TransposeAsReshape
OptimizeForType< TransposeLayer, TransposeAsReshapeImpl > TransposeAsReshape
Definition: TransposeAsReshape.hpp:77
armnn::Compute::Undefined
@ Undefined
armnn::optimizations::FoldPadIntoDepthwiseConvolution2d
OptimizeForExclusiveConnection< PadLayer, DepthwiseConvolution2dLayer, pad_fold::FoldPadIntoDepthwiseConvolution2dImpl > FoldPadIntoDepthwiseConvolution2d
Definition: FoldPadIntoLayer2d.hpp:258
armnn::optimizations::PermuteAndBatchToSpaceAsDepthToSpace
OptimizeForConnection< PermuteLayer, BatchToSpaceNdLayer, PermuteAndBatchToSpaceAsDepthToSpaceImpl< PermuteLayer > > PermuteAndBatchToSpaceAsDepthToSpace
Definition: PermuteAndBatchToSpaceAsDepthToSpace.hpp:102
armnn::optimizations::ConvertConstDequantisationLayersToConstLayers
OptimizeForConnection< ConstantLayer, DequantizeLayer, ConvertConstDequantisationLayersToConstLayersImpl > ConvertConstDequantisationLayersToConstLayers
Definition: ConvertConstDequantisationLayersToConstLayers.hpp:173
armnn::optimizations::OptimizeInversePermutes
OptimizeForConnection< PermuteLayer, PermuteLayer, OptimizeInversePermutesImpl< PermuteLayer > > OptimizeInversePermutes
Definition: OptimizeInversePermutes.hpp:43
armnn::optimizations::FuseBatchNormIntoConvolution2DFloat16
OptimizeForExclusiveConnection< Convolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< Convolution2dLayer, armnn::DataType::Float16 > > FuseBatchNormIntoConvolution2DFloat16
Definition: FuseBatchNorm.hpp:227
armnn::optimizations::FoldPadIntoPooling2d
OptimizeForExclusiveConnection< PadLayer, Pooling2dLayer, pad_fold::FoldPadIntoPooling2dImpl > FoldPadIntoPooling2d
Definition: FoldPadIntoLayer2d.hpp:260
armnn::IOptimizedNetwork::Optimize
friend IOptimizedNetworkPtr Optimize(const INetwork &inNetwork, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options, Optional< std::vector< std::string > & > messages)
Create an optimized version of the network.
Definition: Network.cpp:1773
armnn::optimizations::FuseBatchNormIntoConvolution2DFloat32
OptimizeForExclusiveConnection< Convolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< Convolution2dLayer, armnn::DataType::Float32 > > FuseBatchNormIntoConvolution2DFloat32
Definition: FuseBatchNorm.hpp:222
armnn::optimizations::SquashEqualPermuteSiblings
OptimizeForConnection< Layer, PermuteLayer, SquashEqualSiblingsImpl< PermuteLayer > > SquashEqualPermuteSiblings
Definition: SquashEqualSiblings.hpp:67
armnn::optimizations::FuseBatchNormIntoDepthwiseConvolution2DFloat32
OptimizeForExclusiveConnection< DepthwiseConvolution2dLayer, BatchNormalizationLayer, FuseBatchNorm< DepthwiseConvolution2dLayer, armnn::DataType::Float32 > > FuseBatchNormIntoDepthwiseConvolution2DFloat32
Definition: FuseBatchNorm.hpp:232
armnn::optimizations::ConvertConstantsHalfToFloat
ConvertConstants< Float16ToFloat32, IsFloat32Layer > ConvertConstantsHalfToFloat
Definition: ConvertConstants.hpp:98
armnn::AssignBackends
OptimizationResult AssignBackends(OptimizedNetworkImpl *optNetObjPtr, BackendSettings &backendSettings, Graph::Iterator &firstLayer, Graph::Iterator &lastLayer, Optional< std::vector< std::string > & > errMessages)
Definition: Network.cpp:898
armnn::optimizations::PermuteAsReshape
OptimizeForType< PermuteLayer, PermuteAsReshapeImpl > PermuteAsReshape
Definition: PermuteAsReshape.hpp:66
armnn::optimizations::OptimizeConsecutiveReshapes
OptimizeForConnection< ReshapeLayer, ReshapeLayer, OptimizeConsecutiveReshapesImpl > OptimizeConsecutiveReshapes
Definition: OptimizeConsecutiveReshapes.hpp:61
armnn::optimizations::TransposeAndBatchToSpaceAsDepthToSpace
OptimizeForConnection< TransposeLayer, BatchToSpaceNdLayer, PermuteAndBatchToSpaceAsDepthToSpaceImpl< TransposeLayer > > TransposeAndBatchToSpaceAsDepthToSpace
Definition: PermuteAndBatchToSpaceAsDepthToSpace.hpp:104
armnn::ProfilerManager::RegisterProfiler
void RegisterProfiler(IProfiler *profiler)
Definition: Profiling.cpp:600
armnn::ShapeInferenceMethod::InferAndValidate
@ InferAndValidate
Infer missing output shapes and validate all output shapes.