ArmNN 21.02
JsonPrinterTestImpl.cpp File Reference
#include "JsonPrinterTestImpl.hpp"
#include "armnn/utility/StringUtils.hpp"
#include <Profiling.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>
#include <boost/test/unit_test.hpp>
#include <sstream>
#include <stack>
#include <string>


Functions

bool AreMatchingPair (const char opening, const char closing)
bool AreParenthesesMatching (const std::string &exp)
std::vector< double > ExtractMeasurements (const std::string &exp)
std::vector< std::string > ExtractSections (const std::string &exp)
std::string GetSoftmaxProfilerJson (const std::vector< armnn::BackendId > &backends)
void ValidateProfilerJson (std::string &result)
void RunSoftmaxProfilerJsonPrinterTest (const std::vector< armnn::BackendId > &backends)

Function Documentation

◆ AreMatchingPair()

bool AreMatchingPair (const char opening, const char closing)   [inline]

Definition at line 21 of file JsonPrinterTestImpl.cpp.

Referenced by AreParenthesesMatching().

22 {
23     return (opening == '{' && closing == '}') || (opening == '[' && closing == ']');
24 }
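
The helper recognises only the brace and bracket pairs that occur in the profiler's JSON output; round parentheses are not treated as a pair. A minimal behaviour sketch (values are illustrative), assuming the inline definition above is visible to the caller, i.e. the sketch lives in the same translation unit as JsonPrinterTestImpl.cpp:

#include <cassert>

bool AreMatchingPair(const char opening, const char closing);  // helper documented above

int main()
{
    assert(AreMatchingPair('{', '}'));   // matching brace pair
    assert(AreMatchingPair('[', ']'));   // matching bracket pair
    assert(!AreMatchingPair('{', ']'));  // mismatched pair is rejected
    assert(!AreMatchingPair('(', ')'));  // round parentheses are not recognised
}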

◆ AreParenthesesMatching()

bool AreParenthesesMatching ( const std::string &  exp)

Definition at line 26 of file JsonPrinterTestImpl.cpp.

References AreMatchingPair().

Referenced by ValidateProfilerJson().

27 {
28     std::stack<char> expStack;
29     for (size_t i = 0; i < exp.length(); ++i)
30     {
31         if (exp[i] == '{' || exp[i] == '[')
32         {
33             expStack.push(exp[i]);
34         }
35         else if (exp[i] == '}' || exp[i] == ']')
36         {
37             if (expStack.empty() || !AreMatchingPair(expStack.top(), exp[i]))
38             {
39                 return false;
40             }
41             else
42             {
43                 expStack.pop();
44             }
45         }
46     }
47     return expStack.empty();
48 }
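
Each '{' or '[' is pushed onto a stack and each '}' or ']' must match the character on top; a leftover opener or an unmatched closer fails the check. A minimal behaviour sketch with hypothetical inputs, assuming the function above is linked in:

#include <cassert>
#include <string>

bool AreParenthesesMatching(const std::string& exp);  // helper documented above

int main()
{
    assert(AreParenthesesMatching("{\"raw\": [1, 2, 3]}"));   // balanced braces and brackets
    assert(!AreParenthesesMatching("{\"raw\": [1, 2, 3}"));   // '}' does not match the open '['
    assert(!AreParenthesesMatching("]"));                     // closer with no opener
    assert(AreParenthesesMatching("no brackets at all"));     // nothing to balance
}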

◆ ExtractMeasurements()

std::vector<double> ExtractMeasurements ( const std::string &  exp)

Definition at line 50 of file JsonPrinterTestImpl.cpp.

References armnn::stringUtils::StringTrim().

Referenced by ValidateProfilerJson().

51 {
52     std::vector<double> numbers;
53     bool inArray = false;
54     std::string numberString;
55     for (size_t i = 0; i < exp.size(); ++i)
56     {
57         if (exp[i] == '[')
58         {
59             inArray = true;
60         }
61         else if (exp[i] == ']' && inArray)
62         {
63             try
64             {
65                 armnn::stringUtils::StringTrim(numberString, "\t,\n");
66                 numbers.push_back(std::stod(numberString));
67             }
68             catch (std::invalid_argument const&)
69             {
70                 BOOST_FAIL("Could not convert measurements to double: " + numberString);
71             }
72 
73             numberString.clear();
74             inArray = false;
75         }
76         else if (exp[i] == ',' && inArray)
77         {
78             try
79             {
80                 armnn::stringUtils::StringTrim(numberString, "\t,\n");
81                 numbers.push_back(std::stod(numberString));
82             }
83             catch (std::invalid_argument const&)
84             {
85                 BOOST_FAIL("Could not convert measurements to double: " + numberString);
86             }
87             numberString.clear();
88         }
89         else if (exp[i] != '[' && inArray && exp[i] != ',' && exp[i] != ' ')
90         {
91             numberString += exp[i];
92         }
93     }
94     return numbers;
95 }
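
Characters between '[' and ']' are accumulated, split on commas, trimmed of tabs, commas and newlines via StringTrim, and converted with std::stod; a value that cannot be converted triggers BOOST_FAIL, so the helper is meant to run under Boost.Test. A minimal sketch with a hypothetical profiler fragment, assuming the function above is linked in:

#include <cassert>
#include <string>
#include <vector>

std::vector<double> ExtractMeasurements(const std::string& exp);  // helper documented above

int main()
{
    // Hypothetical "raw" array as it might appear in the profiler output.
    std::string fragment = "\"raw\": [ 0.1, 2.5,\n\t42.0 ]";
    std::vector<double> values = ExtractMeasurements(fragment);
    assert(values.size() == 3);   // one double per comma-separated entry
    assert(values[2] == 42.0);    // whitespace and tabs around entries are trimmed
}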

◆ ExtractSections()

std::vector<std::string> ExtractSections ( const std::string &  exp)

Definition at line 97 of file JsonPrinterTestImpl.cpp.

Referenced by ValidateProfilerJson().

98 {
99     std::vector<std::string> sections;
100 
101     std::stack<size_t> s;
102     for (size_t i = 0; i < exp.size(); i++)
103     {
104         if (exp.at(i) == '{')
105         {
106             s.push(i);
107         }
108         else if (exp.at(i) == '}')
109         {
110             size_t from = s.top();
111             s.pop();
112             sections.push_back(exp.substr(from, i - from + 1));
113         }
114     }
115 
116     return sections;
117 }
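
Brace positions are pushed onto a stack and every '}' emits the substring back to its matching '{', so nested sections are returned innermost first, with each enclosing section following later and containing its children verbatim. A minimal sketch with a hypothetical input, assuming the function above is linked in:

#include <cassert>
#include <string>
#include <vector>

std::vector<std::string> ExtractSections(const std::string& exp);  // helper documented above

int main()
{
    std::string json = "{\"outer\": {\"inner\": 1}}";
    std::vector<std::string> sections = ExtractSections(json);
    assert(sections.size() == 2);
    assert(sections[0] == "{\"inner\": 1}");  // innermost section is emitted first
    assert(sections[1] == json);              // then the enclosing section, children included
}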

◆ GetSoftmaxProfilerJson()

std::string GetSoftmaxProfilerJson ( const std::vector< armnn::BackendId > &  backends)

Definition at line 119 of file JsonPrinterTestImpl.cpp.

References IOutputSlot::Connect(), armnn::CpuAcc, IRuntime::Create(), INetwork::Create(), armnn::error, IConnectableLayer::GetInputSlot(), ProfilerManager::GetInstance(), IConnectableLayer::GetOutputSlot(), armnn::GpuAcc, SoftmaxDescriptor::m_Axis, IRuntime::CreationOptions::m_EnableGpuProfiling, armnn::Optimize(), armnn::QAsymmU8, TensorInfo::SetQuantizationOffset(), TensorInfo::SetQuantizationScale(), IOutputSlot::SetTensorInfo(), and armnn::Success.

Referenced by RunSoftmaxProfilerJsonPrinterTest().

120 {
121     using namespace armnn;
122 
123     BOOST_CHECK(!backends.empty());
124 
125     ProfilerManager& profilerManager = ProfilerManager::GetInstance();
126 
127     // Create runtime in which test will run
128     IRuntime::CreationOptions options;
129     options.m_EnableGpuProfiling = backends.front() == armnn::Compute::GpuAcc;
130     IRuntimePtr runtime(IRuntime::Create(options));
131 
132     // build up the structure of the network
133     INetworkPtr net(INetwork::Create());
134 
135     IConnectableLayer* input = net->AddInputLayer(0, "input");
136     SoftmaxDescriptor softmaxDescriptor;
137     // Set Axis to -1 if CL or Neon until further Axes are supported.
138     if (backends.front() == armnn::Compute::CpuAcc || backends.front() == armnn::Compute::GpuAcc)
139     {
140         softmaxDescriptor.m_Axis = -1;
141     }
142     IConnectableLayer* softmax = net->AddSoftmaxLayer(softmaxDescriptor, "softmax");
143     IConnectableLayer* output = net->AddOutputLayer(0, "output");
144 
145     input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
146     softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));
147 
148     // set the tensors in the network
149     TensorInfo inputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
150     inputTensorInfo.SetQuantizationOffset(100);
151     inputTensorInfo.SetQuantizationScale(10000.0f);
152     input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
153 
154     TensorInfo outputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
155     outputTensorInfo.SetQuantizationOffset(0);
156     outputTensorInfo.SetQuantizationScale(1.0f / 256.0f);
157     softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
158 
159     // optimize the network
160     IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
161     if (!optNet)
162     {
163         BOOST_FAIL("Error occurred during Optimization, Optimize() returned nullptr.");
164     }
165     // load it into the runtime
166     NetworkId netId;
167     auto error = runtime->LoadNetwork(netId, std::move(optNet));
168     BOOST_TEST(error == Status::Success);
169 
170     // create structures for input & output
171     std::vector<uint8_t> inputData
172     {
173         1, 10, 3, 200, 5
174         // one of inputs is sufficiently larger than the others to saturate softmax
175     };
176     std::vector<uint8_t> outputData(5);
177 
178     armnn::InputTensors inputTensors
179     {
180         {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
181     };
182     armnn::OutputTensors outputTensors
183     {
184         {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
185     };
186 
187     runtime->GetProfiler(netId)->EnableProfiling(true);
188 
189     // do the inferences
190     runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
191     runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
192     runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
193 
194     // retrieve the Profiler.Print() output
195     std::stringstream ss;
196     profilerManager.GetProfiler()->Print(ss);
197 
198     return ss.str();
199 }
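
The helper builds a small quantised softmax network on the first backend in the list, runs three inferences with profiling enabled, and returns the ProfilerManager::Print() output as a string. A sketch of calling it directly, assuming the declaration is visible, the chosen backend is built into the runtime, and the call happens inside a Boost.Test case (the BOOST_CHECK/BOOST_TEST macros inside the helper need the framework); the case name is illustrative:

#include <armnn/BackendId.hpp>
#include <boost/test/unit_test.hpp>
#include <string>
#include <vector>

std::string GetSoftmaxProfilerJson(const std::vector<armnn::BackendId>& backends);  // documented above

// Hypothetical test case: grab the profiler output for the NEON backend and
// check that a JSON-like result with the expected top-level section came back.
BOOST_AUTO_TEST_CASE(SoftmaxProfilerJsonSketch)
{
    std::vector<armnn::BackendId> backends = {armnn::Compute::CpuAcc};
    std::string json = GetSoftmaxProfilerJson(backends);
    BOOST_CHECK(!json.empty());
    BOOST_CHECK(json.find("inference_measurements") != std::string::npos);
}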

◆ RunSoftmaxProfilerJsonPrinterTest()

void RunSoftmaxProfilerJsonPrinterTest ( const std::vector< armnn::BackendId > &  backends)

Definition at line 245 of file JsonPrinterTestImpl.cpp.

References armnn::CpuAcc, GetSoftmaxProfilerJson(), armnn::GpuAcc, and ValidateProfilerJson().

Referenced by BOOST_AUTO_TEST_CASE().

246 {
247     // setup the test fixture and obtain JSON Printer result
248     std::string result = GetSoftmaxProfilerJson(backends);
249 
250     // validate the JSON Printer result
251     ValidateProfilerJson(result);
252 
253     const armnn::BackendId& firstBackend = backends.at(0);
254     if (firstBackend == armnn::Compute::GpuAcc)
255     {
256         BOOST_CHECK(result.find("OpenClKernelTimer/: softmax_layer_max_shift_exp_sum_quantized_serial GWS[,,]")
257                     != std::string::npos);
258     }
259     else if (firstBackend == armnn::Compute::CpuAcc)
260     {
261         BOOST_CHECK(result.find("NeonKernelTimer/: CpuLogitsDLogSoftmaxKernel_#") != std::string::npos);
262     }
263 }
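
This is the entry point the per-backend JsonPrinter test cases call: it fetches the profiler JSON, validates it, and then looks for a backend-specific kernel-timer label. A hedged sketch of what such a caller could look like, assuming JsonPrinterTestImpl.hpp (the header included at the top of this file) declares the function and the GpuAcc backend is built with an OpenCL device available; the suite and case names are illustrative, not the actual ArmNN test names:

#include <boost/test/unit_test.hpp>
#include <armnn/BackendId.hpp>
#include <vector>

#include "JsonPrinterTestImpl.hpp"

BOOST_AUTO_TEST_SUITE(JsonPrinterSketch)

BOOST_AUTO_TEST_CASE(SoftmaxProfilerJsonPrinterGpuAccTest)
{
    // Runs the softmax network on GpuAcc, validates the profiler JSON and
    // checks for the OpenCL kernel-timer entry mentioned above.
    std::vector<armnn::BackendId> backends = {armnn::Compute::GpuAcc};
    RunSoftmaxProfilerJsonPrinterTest(backends);
}

BOOST_AUTO_TEST_SUITE_END()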

◆ ValidateProfilerJson()

void ValidateProfilerJson (std::string &result)   [inline]

Definition at line 201 of file JsonPrinterTestImpl.cpp.

References AreParenthesesMatching(), ExtractMeasurements(), and ExtractSections().

Referenced by RunSoftmaxProfilerJsonPrinterTest().

202 {
203     // ensure all measurements are greater than zero
204     std::vector<double> measurementsVector = ExtractMeasurements(result);
205     BOOST_CHECK(!measurementsVector.empty());
206 
207     // check sections contain raw and unit tags
208     // first ensure Parenthesis are balanced
209     if (AreParenthesesMatching(result))
210     {
211         // remove parent sections that will not have raw or unit tag
212         std::vector<std::string> sectionVector = ExtractSections(result);
213         for (size_t i = 0; i < sectionVector.size(); ++i)
214         {
215 
216             if (sectionVector[i].find("\"ArmNN\":") != std::string::npos
217                 || sectionVector[i].find("\"inference_measurements\":") != std::string::npos)
218             {
219                 sectionVector.erase(sectionVector.begin() + static_cast<int>(i));
220             }
221         }
222         BOOST_CHECK(!sectionVector.empty());
223 
224         BOOST_CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
225                                 [](std::string i) { return (i.find("\"raw\":") != std::string::npos); }));
226 
227         BOOST_CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
228                                 [](std::string i) { return (i.find("\"unit\":") != std::string::npos); }));
229     }
230 
231     // remove the time measurements as they vary from test to test
232     result.erase(std::remove_if(result.begin(), result.end(),
233                                 [](char c) { return c == '.'; }), result.end());
234     result.erase(std::remove_if(result.begin(), result.end(), &isdigit), result.end());
235     result.erase(std::remove_if(result.begin(), result.end(),
236                                 [](char c) { return c == '\t'; }), result.end());
237 
238     BOOST_CHECK(result.find("ArmNN") != std::string::npos);
239     BOOST_CHECK(result.find("inference_measurements") != std::string::npos);
240 
241     // ensure no spare parenthesis present in print output
242     BOOST_CHECK(AreParenthesesMatching(result));
243 }
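
The validation requires balanced braces/brackets, at least one numeric measurement, an "ArmNN" parent section, an "inference_measurements" section, and "raw"/"unit" tags in the remaining sections; note that it also strips digits, dots and tabs from result in place. A hypothetical, heavily simplified string of the shape these checks expect (the timer name and "us" unit are illustrative, not actual profiler output):

#include <string>

// Hypothetical input of the shape ValidateProfilerJson expects: balanced braces,
// an "ArmNN" parent, an "inference_measurements" section, and leaf sections
// carrying "raw" and "unit" tags.
const std::string sampleProfilerJson =
    "{\n"
    "\t\"ArmNN\": {\n"
    "\t\t\"inference_measurements\": {\n"
    "\t\t\t\"SomeKernelTimer\": {\n"
    "\t\t\t\t\"raw\": [ 1.0, 2.0, 3.0 ],\n"
    "\t\t\t\t\"unit\": \"us\"\n"
    "\t\t\t}\n"
    "\t\t}\n"
    "\t}\n"
    "}";

Because the digits, dots and tabs are erased from result before the final checks, callers such as RunSoftmaxProfilerJsonPrinterTest search the returned string for kernel labels with the numbers already stripped, e.g. "GWS[,,]".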