ArmNN
 21.08
JsonPrinterTestImpl.cpp
Go to the documentation of this file.
1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
8 
9 #include <Profiling.hpp>
10 
11 #include <armnn/Descriptors.hpp>
12 #include <armnn/IRuntime.hpp>
13 #include <armnn/INetwork.hpp>
14 
15 #include <doctest/doctest.h>
16 
17 #include <sstream>
18 #include <stack>
19 #include <string>
20 #include <algorithm>
21 
// Returns true when 'closing' is the correct closing bracket for 'opening'
// (i.e. '{'/'}' or '['/']'); all other character pairs return false.
inline bool AreMatchingPair(const char opening, const char closing)
{
    const bool braces   = (opening == '{') && (closing == '}');
    const bool brackets = (opening == '[') && (closing == ']');
    return braces || brackets;
}
26 
// Checks that every '{'/'[' in 'exp' is closed by the matching '}'/']' in the
// correct nesting order. Characters other than the four bracket kinds are
// ignored. Returns false on any mismatch or leftover opener.
bool AreParenthesesMatching(const std::string& exp)
{
    std::stack<char> openers;
    for (const char c : exp)
    {
        switch (c)
        {
            case '{':
            case '[':
                openers.push(c);
                break;
            case '}':
            case ']':
            {
                if (openers.empty())
                {
                    return false;
                }
                const char open = openers.top();
                const bool matches = (open == '{' && c == '}') ||
                                     (open == '[' && c == ']');
                if (!matches)
                {
                    return false;
                }
                openers.pop();
                break;
            }
            default:
                break;
        }
    }
    // Balanced only if every opener was consumed.
    return openers.empty();
}
50 
51 std::vector<double> ExtractMeasurements(const std::string& exp)
52 {
53  std::vector<double> numbers;
54  bool inArray = false;
55  std::string numberString;
56  for (size_t i = 0; i < exp.size(); ++i)
57  {
58  if (exp[i] == '[')
59  {
60  inArray = true;
61  }
62  else if (exp[i] == ']' && inArray)
63  {
64  try
65  {
66  armnn::stringUtils::StringTrim(numberString, "\t,\n");
67  numbers.push_back(std::stod(numberString));
68  }
69  catch (std::invalid_argument const&)
70  {
71  FAIL("Could not convert measurements to double: " + numberString);
72  }
73 
74  numberString.clear();
75  inArray = false;
76  }
77  else if (exp[i] == ',' && inArray)
78  {
79  try
80  {
81  armnn::stringUtils::StringTrim(numberString, "\t,\n");
82  numbers.push_back(std::stod(numberString));
83  }
84  catch (std::invalid_argument const&)
85  {
86  FAIL("Could not convert measurements to double: " + numberString);
87  }
88  numberString.clear();
89  }
90  else if (exp[i] != '[' && inArray && exp[i] != ',' && exp[i] != ' ')
91  {
92  numberString += exp[i];
93  }
94  }
95  return numbers;
96 }
97 
// Returns every brace-delimited section of 'exp' (inclusive of the braces),
// innermost sections first. Callers in this file only invoke it after
// AreParenthesesMatching() succeeds, but a stray '}' previously called
// s.top()/s.pop() on an empty stack — undefined behaviour. Unmatched closers
// are now skipped instead.
std::vector<std::string> ExtractSections(const std::string& exp)
{
    std::vector<std::string> sections;

    std::stack<size_t> s;
    for (size_t i = 0; i < exp.size(); i++)
    {
        if (exp.at(i) == '{')
        {
            s.push(i);
        }
        else if (exp.at(i) == '}')
        {
            // Guard against unbalanced input: ignore a '}' with no opener.
            if (s.empty())
            {
                continue;
            }
            size_t from = s.top();
            s.pop();
            sections.push_back(exp.substr(from, i - from + 1));
        }
    }

    return sections;
}
119 
// Builds a minimal quantised 1x5 softmax network, runs it three times on the
// requested backends with profiling enabled, and returns the profiler's JSON
// output as a string.
// NOTE(review): the declarations of 'profilerManager', 'options' and 'net'
// fall on lines elided from this view of the file (hyperlinked lines lost in
// extraction) — confirm against the full source before editing.
std::string GetSoftmaxProfilerJson(const std::vector<armnn::BackendId>& backends)
{
    using namespace armnn;

    CHECK(!backends.empty());


    // Create runtime in which test will run
    // GPU profiling is only requested when the first backend is GpuAcc.
    options.m_EnableGpuProfiling = backends.front() == armnn::Compute::GpuAcc;
    IRuntimePtr runtime(IRuntime::Create(options));

    // build up the structure of the network

    IConnectableLayer* input = net->AddInputLayer(0, "input");
    SoftmaxDescriptor softmaxDescriptor;
    // Set Axis to -1 if CL or Neon until further Axes are supported.
    if ( backends.front() == armnn::Compute::CpuAcc || backends.front() == armnn::Compute::GpuAcc)
    {
        softmaxDescriptor.m_Axis = -1;
    }
    IConnectableLayer* softmax = net->AddSoftmaxLayer(softmaxDescriptor, "softmax");
    IConnectableLayer* output = net->AddOutputLayer(0, "output");

    // input -> softmax -> output
    input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
    softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // set the tensors in the network
    TensorInfo inputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
    inputTensorInfo.SetQuantizationOffset(100);
    inputTensorInfo.SetQuantizationScale(10000.0f);
    input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);

    TensorInfo outputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
    outputTensorInfo.SetQuantizationOffset(0);
    outputTensorInfo.SetQuantizationScale(1.0f / 256.0f);
    softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    // optimize the network
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
    if(!optNet)
    {
        FAIL("Error occurred during Optimization, Optimize() returned nullptr.");
    }
    // load it into the runtime
    NetworkId netId;
    auto error = runtime->LoadNetwork(netId, std::move(optNet));
    CHECK(error == Status::Success);

    // create structures for input & output
    std::vector<uint8_t> inputData
    {
        1, 10, 3, 200, 5
        // one of inputs is sufficiently larger than the others to saturate softmax
    };
    std::vector<uint8_t> outputData(5);

    armnn::InputTensors inputTensors
    {
        {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
    };
    armnn::OutputTensors outputTensors
    {
        {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
    };

    runtime->GetProfiler(netId)->EnableProfiling(true);

    // do the inferences
    // run three times so the profiler captures repeated measurements
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // retrieve the Profiler.Print() output
    std::stringstream ss;
    profilerManager.GetProfiler()->Print(ss);

    return ss.str();
}
201 
202 inline void ValidateProfilerJson(std::string& result)
203 {
204  // ensure all measurements are greater than zero
205  std::vector<double> measurementsVector = ExtractMeasurements(result);
206  CHECK(!measurementsVector.empty());
207 
208  // check sections contain raw and unit tags
209  // first ensure Parenthesis are balanced
210  if (AreParenthesesMatching(result))
211  {
212  // remove parent sections that will not have raw or unit tag
213  std::vector<std::string> sectionVector = ExtractSections(result);
214  for (size_t i = 0; i < sectionVector.size(); ++i)
215  {
216 
217  if (sectionVector[i].find("\"ArmNN\":") != std::string::npos
218  || sectionVector[i].find("\"inference_measurements\":") != std::string::npos)
219  {
220  sectionVector.erase(sectionVector.begin() + static_cast<int>(i));
221  }
222  }
223  CHECK(!sectionVector.empty());
224 
225  CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
226  [](std::string i) { return (i.find("\"raw\":") != std::string::npos); }));
227 
228  CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
229  [](std::string i) { return (i.find("\"unit\":") != std::string::npos); }));
230  }
231 
232  // remove the time measurements as they vary from test to test
233  result.erase(std::remove_if (result.begin(),result.end(),
234  [](char c) { return c == '.'; }), result.end());
235  result.erase(std::remove_if (result.begin(), result.end(), &isdigit), result.end());
236  result.erase(std::remove_if (result.begin(),result.end(),
237  [](char c) { return c == '\t'; }), result.end());
238 
239  CHECK(result.find("ArmNN") != std::string::npos);
240  CHECK(result.find("inference_measurements") != std::string::npos);
241 
242  // ensure no spare parenthesis present in print output
243  CHECK(AreParenthesesMatching(result));
244 }
245 
246 void RunSoftmaxProfilerJsonPrinterTest(const std::vector<armnn::BackendId>& backends)
247 {
248  // setup the test fixture and obtain JSON Printer result
249  std::string result = GetSoftmaxProfilerJson(backends);
250 
251  // validate the JSON Printer result
252  ValidateProfilerJson(result);
253 
254  const armnn::BackendId& firstBackend = backends.at(0);
255  if (firstBackend == armnn::Compute::GpuAcc)
256  {
257  CHECK(result.find("OpenClKernelTimer/: softmax_layer_max_shift_exp_sum_quantized_serial GWS[,,]")
258  != std::string::npos);
259  }
260  else if (firstBackend == armnn::Compute::CpuAcc)
261  {
262  CHECK(result.find("NeonKernelTimer") != std::string::npos); // Validate backend
263 
264  bool softmaxCheck = ((result.find("softmax") != std::string::npos) || // Validate softmax
265  (result.find("Softmax") != std::string::npos) ||
266  (result.find("SoftMax") != std::string::npos));
267  CHECK(softmaxCheck);
268 
269  }
270 }
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:39
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:61
int m_Axis
Scalar, defaulted to the last index (-1), specifying the dimension the activation will be performed o...
static ProfilerManager & GetInstance()
Definition: Profiling.cpp:526
void RunSoftmaxProfilerJsonPrinterTest(const std::vector< armnn::BackendId > &backends)
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:30
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:360
Copyright (c) 2021 ARM Limited and Contributors.
std::string GetSoftmaxProfilerJson(const std::vector< armnn::BackendId > &backends)
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
bool AreMatchingPair(const char opening, const char closing)
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition: Tensor.hpp:319
std::vector< std::string > ExtractSections(const std::string &exp)
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
Definition: Network.cpp:1613
int NetworkId
Definition: IRuntime.hpp:24
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:361
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:173
void SetQuantizationScale(float scale)
Definition: Tensor.cpp:475
GPU Execution: OpenCL: ArmCompute.
bool m_EnableGpuProfiling
Setting this flag will allow the user to obtain GPU profiling information from the runtime...
Definition: IRuntime.hpp:116
CPU Execution: NEON: ArmCompute.
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
void SetQuantizationOffset(int32_t offset)
Definition: Tensor.cpp:491
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:172
virtual int Connect(IInputSlot &destination)=0
std::string & StringTrim(std::string &str, const std::string &chars=" \t\n\r")
Trim from both the start and the end of a string.
Definition: StringUtils.hpp:77
std::vector< double > ExtractMeasurements(const std::string &exp)
static INetworkPtr Create(NetworkOptions networkOptions={})
Definition: Network.cpp:530
void ValidateProfilerJson(std::string &result)
A SoftmaxDescriptor for the SoftmaxLayer.
bool AreParenthesesMatching(const std::string &exp)