/// Returns true when `opening` and `closing` form a matched brace or bracket
/// pair: '{' with '}', or '[' with ']'. Any other combination is a mismatch.
bool AreMatchingPair(const char opening, const char closing)
{
    return (opening == '{' && closing == '}') || (opening == '[' && closing == ']');
}
29 std::stack<char> expStack;
30 for (
size_t i = 0; i < exp.length(); ++i)
32 if (exp[i] ==
'{' || exp[i] ==
'[')
34 expStack.push(exp[i]);
36 else if (exp[i] ==
'}' || exp[i] ==
']')
48 return expStack.empty();
53 std::vector<double> numbers;
55 std::string numberString;
56 for (
size_t i = 0; i < exp.size(); ++i)
62 else if (exp[i] ==
']' && inArray)
67 numbers.push_back(std::stod(numberString));
69 catch (std::invalid_argument
const&)
71 FAIL((
"Could not convert measurements to double: " + numberString));
77 else if (exp[i] ==
',' && inArray)
82 numbers.push_back(std::stod(numberString));
84 catch (std::invalid_argument
const&)
86 FAIL((
"Could not convert measurements to double: " + numberString));
90 else if (exp[i] !=
'[' && inArray && exp[i] !=
',' && exp[i] !=
' ')
92 numberString += exp[i];
/// Returns every brace-delimited section of `exp`, innermost sections first
/// for nested braces. Each '{' pushes its index; each '}' pops the most
/// recent index and records the enclosed substring (braces included).
/// NOTE(review): `s.push(i)`, `s.pop()` and the return statement were missing
/// from the garbled extraction; they are forced by the visible `s.top()` use
/// and the declared return type, but confirm against the original file.
std::vector<std::string> ExtractSections(const std::string& exp)
{
    std::vector<std::string> sections;

    std::stack<size_t> s;
    for (size_t i = 0; i < exp.size(); i++)
    {
        if (exp.at(i) == '{')
        {
            s.push(i);
        }
        else if (exp.at(i) == '}')
        {
            size_t from = s.top();
            s.pop();
            sections.push_back(exp.substr(from, i - from + 1));
        }
    }
    return sections;
}
122 using namespace armnn;
124 CHECK(!backends.empty());
140 softmaxDescriptor.
m_Axis = -1;
165 FAIL(
"Error occurred during Optimization, Optimize() returned nullptr.");
169 auto error = runtime->LoadNetwork(netId, std::move(optNet));
173 std::vector<uint8_t> inputData
178 std::vector<uint8_t> outputData(5);
180 TensorInfo inputTensorInfo2 = runtime->GetInputTensorInfo(netId, 0);
188 {0,
armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
191 runtime->GetProfiler(netId)->EnableProfiling(
true);
194 runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
195 runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
196 runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
199 std::stringstream ss;
200 profilerManager.GetProfiler()->Print(ss);
209 CHECK(!measurementsVector.empty());
217 for (
size_t i = 0; i < sectionVector.size(); ++i)
220 if (sectionVector[i].find(
"\"ArmNN\":") != std::string::npos
221 || sectionVector[i].find(
"\"optimize_measurements\":") != std::string::npos
222 || sectionVector[i].find(
"\"loaded_network_measurements\":") != std::string::npos
223 || sectionVector[i].find(
"\"inference_measurements\":") != std::string::npos)
225 sectionVector.erase(sectionVector.begin() +
static_cast<int>(i));
228 CHECK(!sectionVector.empty());
230 CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
231 [](std::string i) {
return (i.find(
"\"raw\":") != std::string::npos); }));
233 CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
234 [](std::string i) {
return (i.find(
"\"unit\":") != std::string::npos); }));
238 result.erase(std::remove_if (result.begin(),result.end(),
239 [](
char c) {
return c ==
'.'; }), result.end());
240 result.erase(std::remove_if (result.begin(), result.end(), &isdigit), result.end());
241 result.erase(std::remove_if (result.begin(),result.end(),
242 [](
char c) {
return c ==
'\t'; }), result.end());
244 CHECK(result.find(
"ArmNN") != std::string::npos);
245 CHECK(result.find(
"inference_measurements") != std::string::npos);
262 CHECK(result.find(
"OpenClKernelTimer/: softmax_layer_max_shift_exp_sum_quantized_serial GWS[,,]")
263 != std::string::npos);
267 CHECK(result.find(
"NeonKernelTimer") != std::string::npos);
269 bool softmaxCheck = ((result.find(
"softmax") != std::string::npos) ||
270 (result.find(
"Softmax") != std::string::npos) ||
271 (result.find(
"SoftMax") != std::string::npos));
static IRuntimePtr Create(const CreationOptions &options)
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
int m_Axis
Scalar, defaulted to the last index (-1), specifying the dimension the activation will be performed on.
static ProfilerManager & GetInstance()
void RunSoftmaxProfilerJsonPrinterTest(const std::vector< armnn::BackendId > &backends)
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Copyright (c) 2021 ARM Limited and Contributors.
std::string GetSoftmaxProfilerJson(const std::vector< armnn::BackendId > &backends)
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
bool AreMatchingPair(const char opening, const char closing)
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
std::vector< std::string > ExtractSections(const std::string &exp)
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
void SetQuantizationScale(float scale)
GPU Execution: OpenCL: ArmCompute.
ArmNN performs an optimization on each model/network before it gets loaded for execution.
bool m_EnableGpuProfiling
Setting this flag will allow the user to obtain GPU profiling information from the runtime...
CPU Execution: NEON: ArmCompute.
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
void SetConstant(const bool IsConstant=true)
Marks the data corresponding to this tensor info as constant.
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
void SetQuantizationOffset(int32_t offset)
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
virtual int Connect(IInputSlot &destination)=0
std::string & StringTrim(std::string &str, const std::string &chars="\t\n\v\f\r ")
Trim from both the start and the end of a string.
std::vector< double > ExtractMeasurements(const std::string &exp)
static INetworkPtr Create(NetworkOptions networkOptions={})
void ValidateProfilerJson(std::string &result)
A SoftmaxDescriptor for the SoftmaxLayer.
bool AreParenthesesMatching(const std::string &exp)