ArmNN 24.02
CanonicalUtils.hpp
1 //
2 // Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
6 #pragma once
7 #include <armnn/ArmNN.hpp>
8 
9 #include <CpuExecutor.h>
10 #include <nnapi/OperandTypes.h>
11 #include <nnapi/Result.h>
12 #include <nnapi/Types.h>
13 
14 #include <vector>
15 #include <string>
16 #include <fstream>
17 #include <iomanip>
18 
19 namespace armnn_driver
20 {
21 
22 using namespace android::nn;
23 
24 extern const armnn::PermutationVector g_DontPermute;
25 
26 template <typename OperandType>
27 class UnsupportedOperand: public std::runtime_error
28 {
29 public:
30     UnsupportedOperand(const OperandType type)
31         : std::runtime_error("Operand type is unsupported")
32         , m_type(type)
33     {}
34 
35     OperandType m_type;
36 };
37 
38 /// Swizzles tensor data in @a input according to the dimension mappings.
39 void SwizzleAndroidNn4dTensorToArmNn(armnn::TensorInfo& tensorInfo,
40                                      const void* input,
41                                      void* output,
42                                      const armnn::PermutationVector& mappings);
43 
44 /// Returns a pointer to a specific location in a pool.
45 void* GetMemoryFromPool(DataLocation location,
46                         const std::vector<android::nn::RunTimePoolInfo>& memPools);
47 
48 void* GetMemoryFromPointer(const Request::Argument& requestArg);
49 
50 armnn::TensorInfo GetTensorInfoForOperand(const Operand& operand);
51 
52 std::string GetOperandSummary(const Operand& operand);
53 
54 bool isQuantizedOperand(const OperandType& operandType);
55 
56 std::string GetModelSummary(const Model& model);
57 
58 template <typename TensorType>
59 void DumpTensor(const std::string& dumpDir,
60                 const std::string& requestName,
61                 const std::string& tensorName,
62                 const TensorType& tensor);
63 
64 void DumpJsonProfilingIfRequired(bool gpuProfilingEnabled,
65                                  const std::string& dumpDir,
66                                  armnn::NetworkId networkId,
67                                  const armnn::IProfiler* profiler);
68 
69 std::string ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork& optimizedNetwork,
70                                         const std::string& dumpDir);
71 
72 std::string SerializeNetwork(const armnn::INetwork& network,
73                              const std::string& dumpDir,
74                              std::vector<uint8_t>& dataCacheData,
75                              bool dataCachingActive = true);
76 
77 void RenameExportedFiles(const std::string& existingSerializedFileName,
78                          const std::string& existingDotFileName,
79                          const std::string& dumpDir,
80                          const armnn::NetworkId networkId);
81 
82 void RenameFile(const std::string& existingName,
83                 const std::string& extension,
84                 const std::string& dumpDir,
85                 const armnn::NetworkId networkId);
86 
87 /// Checks if a tensor info represents a dynamic tensor
88 bool IsDynamicTensor(const armnn::TensorInfo& outputInfo);
89 
90 /// Checks for ArmNN support of dynamic tensors.
91 bool AreDynamicTensorsSupported();
92 
93 std::string GetFileTimestamp();
94 
95 inline OutputShape ComputeShape(const armnn::TensorInfo& info)
96 {
97     OutputShape shape;
98 
99     armnn::TensorShape tensorShape = info.GetShape();
100     // Android will expect scalars as a zero dimensional tensor
101     if (tensorShape.GetDimensionality() == armnn::Dimensionality::Scalar)
102     {
103         shape.dimensions = std::vector<uint32_t>{};
104     }
105     else
106     {
107         std::vector<uint32_t> dimensions;
108         const unsigned int numDims = tensorShape.GetNumDimensions();
109         dimensions.resize(numDims);
110         for (unsigned int outputIdx = 0u; outputIdx < numDims; ++outputIdx)
111         {
112             dimensions[outputIdx] = tensorShape[outputIdx];
113         }
114         shape.dimensions = dimensions;
115     }
116 
117     shape.isSufficient = true;
118 
119     return shape;
120 }
121 
122 void CommitPools(std::vector<::android::nn::RunTimePoolInfo>& memPools);
123 
124 } // namespace armnn_driver
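
ComputeShape (CanonicalUtils.hpp:95) converts an armnn::TensorInfo into the OutputShape structure that the NNAPI runtime expects when a prepared model reports its actual output dimensions. The following is a minimal sketch of how a driver-side caller might combine it with IsDynamicTensor; the TensorInfo values are illustrative only, and it assumes the NNAPI support-library headers used by this file are on the include path and that the driver sources providing IsDynamicTensor are linked in.

#include "CanonicalUtils.hpp"

#include <armnn/Tensor.hpp>
#include <iostream>

int main()
{
    // Illustrative fully specified 1x3x224x224 float output tensor.
    armnn::TensorInfo outputInfo(armnn::TensorShape({1, 3, 224, 224}),
                                 armnn::DataType::Float32);

    // A dynamic output has no complete shape yet, so ComputeShape is only
    // meaningful once every dimension is specified.
    if (!armnn_driver::IsDynamicTensor(outputInfo))
    {
        android::nn::OutputShape shape = armnn_driver::ComputeShape(outputInfo);
        std::cout << "rank: " << shape.dimensions.size()
                  << ", sufficient: " << shape.isSufficient << std::endl;
    }
    return 0;
}
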
armnn_driver::DumpJsonProfilingIfRequired
void DumpJsonProfilingIfRequired(bool gpuProfilingEnabled, const std::string &dumpDir, armnn::NetworkId networkId, const armnn::IProfiler *profiler)
Definition: CanonicalUtils.cpp:352
armnn_driver::Operand
::android::nn::Operand Operand
Definition: ConversionUtils.hpp:46
armnn_driver::IsDynamicTensor
bool IsDynamicTensor(const armnn::TensorInfo &tensorInfo)
Checks if a tensor info represents a dynamic tensor.
Definition: CanonicalUtils.cpp:491
armnn_driver::CommitPools
void CommitPools(std::vector<::android::nn::RunTimePoolInfo > &memPools)
Definition: CanonicalUtils.cpp:615
armnn::TensorInfo
Definition: Tensor.hpp:152
armnn_driver::isQuantizedOperand
bool isQuantizedOperand(const OperandType &operandType)
Definition: CanonicalUtils.cpp:510
armnn::IOptimizedNetwork
Definition: INetwork.hpp:901
armnn_driver::ExportNetworkGraphToDotFile
std::string ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork &optimizedNetwork, const std::string &dumpDir)
Definition: CanonicalUtils.cpp:392
armnn_driver::g_DontPermute
const armnn::PermutationVector g_DontPermute
Definition: CanonicalUtils.cpp:38
armnn_driver::OperandType
::android::nn::OperandType OperandType
Definition: ConversionUtils.hpp:48
armnn::TensorShape
Definition: Tensor.hpp:20
armnn::NetworkId
int NetworkId
Definition: IRuntime.hpp:35
armnn_driver::SwizzleAndroidNn4dTensorToArmNn
void SwizzleAndroidNn4dTensorToArmNn(armnn::TensorInfo &tensorInfo, const void *input, void *output, const armnn::PermutationVector &mappings)
Swizzles tensor data in input according to the dimension mappings.
Definition: CanonicalUtils.cpp:40
armnn_driver::ComputeShape
OutputShape ComputeShape(const armnn::TensorInfo &info)
Definition: CanonicalUtils.hpp:95
armnn_driver::GetFileTimestamp
std::string GetFileTimestamp()
Definition: CanonicalUtils.cpp:560
armnn::TensorShape::GetNumDimensions
unsigned int GetNumDimensions() const
Function that returns the tensor rank.
Definition: Tensor.cpp:174
armnn_driver
Helper classes.
Definition: ArmnnDevice.cpp:37
armnn_driver::GetMemoryFromPool
void * GetMemoryFromPool(DataLocation location, const std::vector< android::nn::RunTimePoolInfo > &memPools)
Returns a pointer to a specific location in a pool.
Definition: CanonicalUtils.cpp:66
armnn_driver::Model
::android::nn::Model Model
Helper classes.
Definition: ConversionUtils.hpp:45
armnn::Dimensionality::Scalar
@ Scalar
armnn_driver::UnsupportedOperand
Definition: CanonicalUtils.hpp:27
armnn_driver::SerializeNetwork
std::string SerializeNetwork(const armnn::INetwork &network, const std::string &dumpDir, std::vector< uint8_t > &dataCacheData, bool dataCachingActive)
Definition: CanonicalUtils.cpp:432
armnn::PermutationVector
Definition: Types.hpp:314
armnn_driver::GetModelSummary
std::string GetModelSummary(const Model &model)
Definition: CanonicalUtils.cpp:526
armnn_driver::RenameFile
void RenameFile(const std::string &existingName, const std::string &extension, const std::string &dumpDir, const armnn::NetworkId networkId)
Definition: CanonicalUtils.cpp:593
armnn_driver::RenameExportedFiles
void RenameExportedFiles(const std::string &existingSerializedFileName, const std::string &existingDotFileName, const std::string &dumpDir, const armnn::NetworkId networkId)
Definition: CanonicalUtils.cpp:580
armnn_driver::GetMemoryFromPointer
void * GetMemoryFromPointer(const Request::Argument &requestArg)
Definition: CanonicalUtils.cpp:77
std
Definition: BackendId.hpp:149
android::nn
Definition: support_library_service.cpp:10
armnn_driver::GetTensorInfoForOperand
armnn::TensorInfo GetTensorInfoForOperand(const Operand &operand)
Definition: CanonicalUtils.cpp:97
armnn_driver::DumpTensor
void DumpTensor(const std::string &dumpDir, const std::string &requestName, const std::string &tensorName, const TensorType &tensor)
Definition: CanonicalUtils.cpp:219
ArmNN.hpp
armnn_driver::UnsupportedOperand::m_type
OperandType m_type
Definition: CanonicalUtils.hpp:35
armnn::IProfiler
Definition: IProfiler.hpp:21
armnn_driver::AreDynamicTensorsSupported
bool AreDynamicTensorsSupported()
Checks for ArmNN support of dynamic tensors.
Definition: CanonicalUtils.cpp:505
armnn_driver::GetOperandSummary
std::string GetOperandSummary(const Operand &operand)
Definition: CanonicalUtils.cpp:191
armnn::TensorShape::GetDimensionality
Dimensionality GetDimensionality() const
Function that returns the tensor type.
Definition: Tensor.hpp:92
armnn_driver::UnsupportedOperand::UnsupportedOperand
UnsupportedOperand(const OperandType type)
Definition: CanonicalUtils.hpp:30
armnn::INetwork
Main network class which provides the interface for building up a neural network.
Definition: INetwork.hpp:347
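
The UnsupportedOperand exception declared at CanonicalUtils.hpp:27 carries the rejected operand type in its public m_type member so that callers can report exactly which NNAPI type could not be handled. The fragment below is a hypothetical conversion helper, not part of the driver, showing one way the exception might be thrown and caught; the name ToArmnnDataType and the set of handled cases are assumptions for illustration.

#include "CanonicalUtils.hpp"

#include <iostream>

// Hypothetical helper: map a canonical NNAPI operand type to an ArmNN data type,
// rejecting anything this sketch does not handle.
armnn::DataType ToArmnnDataType(armnn_driver::OperandType type)
{
    switch (type)
    {
        case armnn_driver::OperandType::TENSOR_FLOAT32: return armnn::DataType::Float32;
        case armnn_driver::OperandType::TENSOR_INT32:   return armnn::DataType::Signed32;
        default:
            throw armnn_driver::UnsupportedOperand<armnn_driver::OperandType>(type);
    }
}

int main()
{
    try
    {
        ToArmnnDataType(armnn_driver::OperandType::TENSOR_QUANT8_ASYMM);
    }
    catch (const armnn_driver::UnsupportedOperand<armnn_driver::OperandType>& e)
    {
        // m_type holds the rejected operand type for diagnostics.
        std::cerr << e.what() << ": type " << static_cast<int>(e.m_type) << std::endl;
    }
    return 0;
}
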