ArmNN
 22.05.01
JsonPrinterTestImpl.cpp File Reference
#include "JsonPrinterTestImpl.hpp"
#include "armnn/utility/StringUtils.hpp"
#include <Profiling.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>
#include <doctest/doctest.h>
#include <algorithm>
#include <cctype>
#include <sstream>
#include <stack>
#include <string>

Go to the source code of this file.

Functions

bool AreMatchingPair (const char opening, const char closing)
 
bool AreParenthesesMatching (const std::string &exp)
 
std::vector< double > ExtractMeasurements (const std::string &exp)
 
std::vector< std::string > ExtractSections (const std::string &exp)
 
std::string GetSoftmaxProfilerJson (const std::vector< armnn::BackendId > &backends)
 
void ValidateProfilerJson (std::string &result)
 
void RunSoftmaxProfilerJsonPrinterTest (const std::vector< armnn::BackendId > &backends)
 

Function Documentation

◆ AreMatchingPair()

bool AreMatchingPair ( const char  opening,
const char  closing 
)
inline

Definition at line 22 of file JsonPrinterTestImpl.cpp.

Referenced by AreParenthesesMatching().

// Returns true when 'opening' and 'closing' form a matching brace or
// bracket pair ('{' with '}', '[' with ']'); false for anything else.
bool AreMatchingPair(const char opening, const char closing)
{
    if (opening == '{')
    {
        return closing == '}';
    }
    if (opening == '[')
    {
        return closing == ']';
    }
    return false;
}

◆ AreParenthesesMatching()

bool AreParenthesesMatching ( const std::string &  exp)

Definition at line 27 of file JsonPrinterTestImpl.cpp.

References AreMatchingPair().

Referenced by ValidateProfilerJson().

// Verifies that every '{'/'[' in 'exp' is closed by the corresponding
// '}'/']' in the correct nesting order. Characters other than braces and
// brackets are ignored. Returns true for an empty string.
bool AreParenthesesMatching(const std::string& exp)
{
    std::stack<char> openers;
    for (const char c : exp)
    {
        switch (c)
        {
            case '{':
            case '[':
                openers.push(c);
                break;
            case '}':
            case ']':
            {
                if (openers.empty())
                {
                    return false;
                }
                const char top = openers.top();
                // Matching-pair check (inlined from AreMatchingPair).
                const bool matches = (top == '{' && c == '}') ||
                                     (top == '[' && c == ']');
                if (!matches)
                {
                    return false;
                }
                openers.pop();
                break;
            }
            default:
                break;
        }
    }
    // Balanced only if every opener was consumed.
    return openers.empty();
}
bool AreMatchingPair(const char opening, const char closing)

◆ ExtractMeasurements()

std::vector<double> ExtractMeasurements ( const std::string &  exp)

Definition at line 51 of file JsonPrinterTestImpl.cpp.

References armnn::stringUtils::StringTrim().

Referenced by ValidateProfilerJson().

52 {
53  std::vector<double> numbers;
54  bool inArray = false;
55  std::string numberString;
56  for (size_t i = 0; i < exp.size(); ++i)
57  {
58  if (exp[i] == '[')
59  {
60  inArray = true;
61  }
62  else if (exp[i] == ']' && inArray)
63  {
64  try
65  {
66  armnn::stringUtils::StringTrim(numberString, "\t,\n");
67  numbers.push_back(std::stod(numberString));
68  }
69  catch (std::invalid_argument const&)
70  {
71  FAIL(("Could not convert measurements to double: " + numberString));
72  }
73 
74  numberString.clear();
75  inArray = false;
76  }
77  else if (exp[i] == ',' && inArray)
78  {
79  try
80  {
81  armnn::stringUtils::StringTrim(numberString, "\t,\n");
82  numbers.push_back(std::stod(numberString));
83  }
84  catch (std::invalid_argument const&)
85  {
86  FAIL(("Could not convert measurements to double: " + numberString));
87  }
88  numberString.clear();
89  }
90  else if (exp[i] != '[' && inArray && exp[i] != ',' && exp[i] != ' ')
91  {
92  numberString += exp[i];
93  }
94  }
95  return numbers;
96 }
std::string & StringTrim(std::string &str, const std::string &chars="\t\n\v\f\r ")
Trim from both the start and the end of a string.
Definition: StringUtils.hpp:80

◆ ExtractSections()

std::vector<std::string> ExtractSections ( const std::string &  exp)

Definition at line 98 of file JsonPrinterTestImpl.cpp.

Referenced by ValidateProfilerJson().

// Splits 'exp' into every brace-delimited section it contains. A section is
// the substring spanning a '{' and its matching '}', braces included; nested
// sections are reported before the sections that enclose them (innermost
// first), because a section is emitted when its closing brace is reached.
//
// Fix: the original called s.top()/s.pop() without checking for an empty
// stack, which is undefined behaviour when 'exp' contains an unmatched '}'.
// Stray closing braces are now ignored instead.
std::vector<std::string> ExtractSections(const std::string& exp)
{
    std::vector<std::string> sections;

    std::stack<size_t> s;
    for (size_t i = 0; i < exp.size(); i++)
    {
        if (exp.at(i) == '{')
        {
            s.push(i);
        }
        else if (exp.at(i) == '}')
        {
            if (s.empty())
            {
                // Unmatched closing brace: nothing to pair it with, skip it.
                continue;
            }
            size_t from = s.top();
            s.pop();
            sections.push_back(exp.substr(from, i - from + 1));
        }
    }

    return sections;
}

◆ GetSoftmaxProfilerJson()

std::string GetSoftmaxProfilerJson ( const std::vector< armnn::BackendId > &  backends)

Definition at line 120 of file JsonPrinterTestImpl.cpp.

References IOutputSlot::Connect(), armnn::CpuAcc, IRuntime::Create(), INetwork::Create(), armnn::error, IConnectableLayer::GetInputSlot(), ProfilerManager::GetInstance(), IConnectableLayer::GetOutputSlot(), armnn::GpuAcc, SoftmaxDescriptor::m_Axis, IRuntime::CreationOptions::m_EnableGpuProfiling, OptimizerOptions::m_ProfilingEnabled, armnn::Optimize(), armnn::QAsymmU8, TensorInfo::SetConstant(), TensorInfo::SetQuantizationOffset(), TensorInfo::SetQuantizationScale(), IOutputSlot::SetTensorInfo(), and armnn::Success.

Referenced by RunSoftmaxProfilerJsonPrinterTest().

// NOTE(review): this listing is a Doxygen rendering of the function body and
// is missing source lines 126, 129 and 134 (Doxygen emitted them as
// hyperlinks). Judging from the cross-references and the identifiers used
// below, those lines presumably declare 'profilerManager'
// (ProfilerManager::GetInstance()), 'options' (IRuntime::CreationOptions)
// and 'net' (INetwork::Create()) -- confirm against the actual
// JsonPrinterTestImpl.cpp before relying on this listing.
//
// Builds a tiny input->softmax->output network, runs it three times with
// profiling enabled on the given backends, and returns the profiler's
// JSON output as a string.
121 {
122  using namespace armnn;
123 
124  CHECK(!backends.empty());
125 
// (source line 126 missing here -- presumably the 'profilerManager' declaration)
127 
128  // Create runtime in which test will run
// (source line 129 missing here -- presumably the 'options' declaration)
130  options.m_EnableGpuProfiling = backends.front() == armnn::Compute::GpuAcc;
131  IRuntimePtr runtime(IRuntime::Create(options));
132 
133  // build up the structure of the network
// (source line 134 missing here -- presumably the 'net' declaration)
135  IConnectableLayer* input = net->AddInputLayer(0, "input");
136  SoftmaxDescriptor softmaxDescriptor;
137  // Set Axis to -1 if CL or Neon until further Axes are supported.
138  if ( backends.front() == armnn::Compute::CpuAcc || backends.front() == armnn::Compute::GpuAcc)
139  {
140  softmaxDescriptor.m_Axis = -1;
141  }
142  IConnectableLayer* softmax = net->AddSoftmaxLayer(softmaxDescriptor, "softmax");
143  IConnectableLayer* output = net->AddOutputLayer(0, "output");
144 
145  input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
146  softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));
147 
148  // set the tensors in the network
149  TensorInfo inputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
150  inputTensorInfo.SetQuantizationOffset(100);
151  inputTensorInfo.SetQuantizationScale(10000.0f);
152  input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
153 
154  TensorInfo outputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
155  outputTensorInfo.SetQuantizationOffset(0);
156  outputTensorInfo.SetQuantizationScale(1.0f / 256.0f);
157  softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
158 
159  // optimize the network
160  armnn::OptimizerOptions optOptions;
161  optOptions.m_ProfilingEnabled = true;
162  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optOptions);
163  if(!optNet)
164  {
165  FAIL("Error occurred during Optimization, Optimize() returned nullptr.");
166  }
167  // load it into the runtime
168  NetworkId netId;
169  auto error = runtime->LoadNetwork(netId, std::move(optNet));
170  CHECK(error == Status::Success);
171 
172  // create structures for input & output
173  std::vector<uint8_t> inputData
174  {
175  1, 10, 3, 200, 5
176  // one of inputs is sufficiently larger than the others to saturate softmax
177  };
178  std::vector<uint8_t> outputData(5);
179 
180  TensorInfo inputTensorInfo2 = runtime->GetInputTensorInfo(netId, 0);
181  inputTensorInfo2.SetConstant(true);
182  armnn::InputTensors inputTensors
183  {
184  {0, armnn::ConstTensor(inputTensorInfo2, inputData.data())}
185  };
186  armnn::OutputTensors outputTensors
187  {
188  {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
189  };
190 
191  runtime->GetProfiler(netId)->EnableProfiling(true);
192 
193  // do the inferences
194  runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
195  runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
196  runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
197 
198  // retrieve the Profiler.Print() output
199  std::stringstream ss;
200  profilerManager.GetProfiler()->Print(ss);
201 
202  return ss.str();
203 }
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:49
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:66
int m_Axis
Scalar, defaulted to the last index (-1), specifying the dimension the activation will be performed on.
static ProfilerManager & GetInstance()
Definition: Profiling.cpp:572
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:33
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:392
Copyright (c) 2021 ARM Limited and Contributors.
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition: Tensor.hpp:319
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
Definition: Network.cpp:1847
int NetworkId
Definition: IRuntime.hpp:27
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:393
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:242
GPU Execution: OpenCL: ArmCompute.
ArmNN performs an optimization on each model/network before it gets loaded for execution.
Definition: INetwork.hpp:137
bool m_EnableGpuProfiling
Setting this flag will allow the user to obtain GPU profiling information from the runtime.
Definition: IRuntime.hpp:93
CPU Execution: NEON: ArmCompute.
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
void SetConstant(const bool IsConstant=true)
Marks the data corresponding to this tensor info as constant.
Definition: Tensor.cpp:514
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:241
virtual int Connect(IInputSlot &destination)=0
static INetworkPtr Create(NetworkOptions networkOptions={})
Definition: Network.cpp:476
A SoftmaxDescriptor for the SoftmaxLayer.

◆ RunSoftmaxProfilerJsonPrinterTest()

void RunSoftmaxProfilerJsonPrinterTest ( const std::vector< armnn::BackendId > &  backends)

Definition at line 251 of file JsonPrinterTestImpl.cpp.

References armnn::CpuAcc, GetSoftmaxProfilerJson(), armnn::GpuAcc, and ValidateProfilerJson().

Referenced by TEST_CASE_FIXTURE(), and TEST_SUITE().

252 {
253  // setup the test fixture and obtain JSON Printer result
254  std::string result = GetSoftmaxProfilerJson(backends);
255 
256  // validate the JSON Printer result
257  ValidateProfilerJson(result);
258 
259  const armnn::BackendId& firstBackend = backends.at(0);
260  if (firstBackend == armnn::Compute::GpuAcc)
261  {
262  CHECK(result.find("OpenClKernelTimer/: softmax_layer_max_shift_exp_sum_quantized_serial GWS[,,]")
263  != std::string::npos);
264  }
265  else if (firstBackend == armnn::Compute::CpuAcc)
266  {
267  CHECK(result.find("NeonKernelTimer") != std::string::npos); // Validate backend
268 
269  bool softmaxCheck = ((result.find("softmax") != std::string::npos) || // Validate softmax
270  (result.find("Softmax") != std::string::npos) ||
271  (result.find("SoftMax") != std::string::npos));
272  CHECK(softmaxCheck);
273 
274  }
275 }
std::string GetSoftmaxProfilerJson(const std::vector< armnn::BackendId > &backends)
GPU Execution: OpenCL: ArmCompute.
CPU Execution: NEON: ArmCompute.
void ValidateProfilerJson(std::string &result)

◆ ValidateProfilerJson()

void ValidateProfilerJson ( std::string &  result)
inline

Definition at line 205 of file JsonPrinterTestImpl.cpp.

References AreParenthesesMatching(), ExtractMeasurements(), and ExtractSections().

Referenced by RunSoftmaxProfilerJsonPrinterTest().

206 {
207  // ensure all measurements are greater than zero
208  std::vector<double> measurementsVector = ExtractMeasurements(result);
209  CHECK(!measurementsVector.empty());
210 
211  // check sections contain raw and unit tags
212  // first ensure Parenthesis are balanced
213  if (AreParenthesesMatching(result))
214  {
215  // remove parent sections that will not have raw or unit tag
216  std::vector<std::string> sectionVector = ExtractSections(result);
217  for (size_t i = 0; i < sectionVector.size(); ++i)
218  {
219 
220  if (sectionVector[i].find("\"ArmNN\":") != std::string::npos
221  || sectionVector[i].find("\"optimize_measurements\":") != std::string::npos
222  || sectionVector[i].find("\"loaded_network_measurements\":") != std::string::npos
223  || sectionVector[i].find("\"inference_measurements\":") != std::string::npos)
224  {
225  sectionVector.erase(sectionVector.begin() + static_cast<int>(i));
226  }
227  }
228  CHECK(!sectionVector.empty());
229 
230  CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
231  [](std::string i) { return (i.find("\"raw\":") != std::string::npos); }));
232 
233  CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
234  [](std::string i) { return (i.find("\"unit\":") != std::string::npos); }));
235  }
236 
237  // remove the time measurements as they vary from test to test
238  result.erase(std::remove_if (result.begin(),result.end(),
239  [](char c) { return c == '.'; }), result.end());
240  result.erase(std::remove_if (result.begin(), result.end(), &isdigit), result.end());
241  result.erase(std::remove_if (result.begin(),result.end(),
242  [](char c) { return c == '\t'; }), result.end());
243 
244  CHECK(result.find("ArmNN") != std::string::npos);
245  CHECK(result.find("inference_measurements") != std::string::npos);
246 
247  // ensure no spare parenthesis present in print output
248  CHECK(AreParenthesesMatching(result));
249 }
std::vector< std::string > ExtractSections(const std::string &exp)
std::vector< double > ExtractMeasurements(const std::string &exp)
bool AreParenthesesMatching(const std::string &exp)