about summary refs log tree commit diff
path: root/tests
diff options
context:
space:
mode:
authorJan Eilers <jan.eilers@arm.com>2021-09-07 12:46:15 +0100
committerJan Eilers <jan.eilers@arm.com>2021-09-13 10:05:00 +0100
commit284b5d19c8cf446b7eec16ea560c544ac39008c1 (patch)
tree8a4d34db3fff091bacd15b004b9c6e6e4b8248ba /tests
parent65d5d2ddf77c69e76643e40440aa986defe6d0d7 (diff)
downloadarmnn-284b5d19c8cf446b7eec16ea560c544ac39008c1.tar.gz
Add 'do-not-print-output' option to ExNet
Signed-off-by: Jan Eilers <jan.eilers@arm.com>
Change-Id: I10fb010ee8d3f813d2264cefb526f352e30d7046
Diffstat (limited to 'tests')
-rw-r--r--tests/ExecuteNetwork/ExecuteNetwork.cpp63
-rw-r--r--tests/ExecuteNetwork/ExecuteNetworkParams.hpp1
-rw-r--r--tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp5
-rw-r--r--tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp61
-rw-r--r--tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp4
5 files changed, 101 insertions, 33 deletions
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 16d34c8c9d..8acab71290 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -238,9 +238,12 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
return EXIT_FAILURE;
}
- for (int i = 0; i < outputSize; ++i)
+ if (!params.m_DontPrintOutputs)
{
- printf("%f ", tfLiteDelageOutputData[i]);
+ for (int i = 0; i < outputSize; ++i)
+ {
+ printf("%f ", tfLiteDelageOutputData[i]);
+ }
}
}
else if (params.m_OutputTypes[outputIndex].compare("int") == 0)
@@ -253,9 +256,12 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
return EXIT_FAILURE;
}
- for (int i = 0; i < outputSize; ++i)
+ if (!params.m_DontPrintOutputs)
{
- printf("%d ", tfLiteDelageOutputData[i]);
+ for (int i = 0; i < outputSize; ++i)
+ {
+ printf("%d ", tfLiteDelageOutputData[i]);
+ }
}
}
else if (params.m_OutputTypes[outputIndex].compare("qsymms8") == 0)
@@ -268,9 +274,12 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
return EXIT_FAILURE;
}
- for (int i = 0; i < outputSize; ++i)
+ if (!params.m_DontPrintOutputs)
{
- printf("%d ", tfLiteDelageOutputData[i]);
+ for (int i = 0; i < outputSize; ++i)
+ {
+ printf("%d ", tfLiteDelageOutputData[i]);
+ }
}
}
else if (params.m_OutputTypes[outputIndex].compare("qasymm8") == 0 ||
@@ -284,9 +293,12 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
return EXIT_FAILURE;
}
- for (int i = 0; i < outputSize; ++i)
+ if (!params.m_DontPrintOutputs)
{
- printf("%u ", tfLiteDelageOutputData[i]);
+ for (int i = 0; i < outputSize; ++i)
+ {
+ printf("%u ", tfLiteDelageOutputData[i]);
+ }
}
}
else
@@ -472,6 +484,10 @@ int MainImpl(const ExecuteNetworkParams& params,
{
ARMNN_LOG(warning) << "The input data was generated, note that the output will not be useful";
}
+ if (params.m_DontPrintOutputs)
+ {
+ ARMNN_LOG(info) << "Printing outputs to console is disabled.";
+ }
// Print output tensors
const auto& infosOut = model.GetOutputBindingInfos();
@@ -479,9 +495,9 @@ int MainImpl(const ExecuteNetworkParams& params,
{
const armnn::TensorInfo& infoOut = infosOut[i].second;
- // We've made sure before that the number of output files either equals numOutputs, in which case
- // we override those files when processing the results of each iteration (only the result of the
- // last iteration will be stored), or there are enough
+ // We've made sure before that the number of output files either equals numOutputs, in which
+ // case we override those files when processing the results of each iteration (only the result
+ // of the last iteration will be stored), or there are enough
// output files for each output of each iteration.
size_t outputFileIndex = x * numOutputs + i;
if (!params.m_OutputTensorFiles.empty())
@@ -499,7 +515,8 @@ int MainImpl(const ExecuteNetworkParams& params,
TensorPrinter printer(inferenceModelParams.m_OutputBindings[i],
infoOut,
outputTensorFile,
- params.m_DequantizeOutput);
+ params.m_DequantizeOutput,
+ !params.m_DontPrintOutputs);
mapbox::util::apply_visitor(printer, outputs[x][i]);
}
@@ -575,14 +592,18 @@ int MainImpl(const ExecuteNetworkParams& params,
{
ARMNN_LOG(warning) << "The input data was generated, note that the output will not be useful";
}
+ if (params.m_DontPrintOutputs)
+ {
+ ARMNN_LOG(info) << "Printing outputs to console is disabled.";
+ }
// Print output tensors
const auto& infosOut = model.GetOutputBindingInfos();
for (size_t i = 0; i < numOutputs; i++)
{
// We've made sure before that the number of output files either equals numOutputs, in which
- // case we override those files when processing the results of each iteration (only the result
- // of the last iteration will be stored), or there are enough
+ // case we override those files when processing the results of each iteration (only the
+ // result of the last iteration will be stored), or there are enough
// output files for each output of each iteration.
size_t outputFileIndex = iteration * numOutputs + i;
if (!params.m_OutputTensorFiles.empty())
@@ -602,7 +623,8 @@ int MainImpl(const ExecuteNetworkParams& params,
TensorPrinter printer(inferenceModelParams.m_OutputBindings[i],
infoOut,
outputTensorFile,
- params.m_DequantizeOutput);
+ params.m_DequantizeOutput,
+ !params.m_DontPrintOutputs);
mapbox::util::apply_visitor(printer, inferenceOutputMap.at(cb->GetInferenceId())[i]);
}
@@ -683,14 +705,18 @@ int MainImpl(const ExecuteNetworkParams& params,
{
ARMNN_LOG(warning) << "The input data was generated, note that the output will not be useful";
}
+ if (params.m_DontPrintOutputs)
+ {
+ ARMNN_LOG(info) << "Printing outputs to console is disabled.";
+ }
// Print output tensors
const auto& infosOut = model.GetOutputBindingInfos();
for (size_t i = 0; i < numOutputs; i++)
{
// We've made sure before that the number of output files either equals numOutputs, in which
- // case we override those files when processing the results of each iteration (only the result
- // of the last iteration will be stored), or there are enough
+ // case we override those files when processing the results of each iteration (only the
+ // result of the last iteration will be stored), or there are enough
// output files for each output of each iteration.
size_t outputFileIndex = j * numOutputs + i;
if (!params.m_OutputTensorFiles.empty())
@@ -709,7 +735,8 @@ int MainImpl(const ExecuteNetworkParams& params,
TensorPrinter printer(inferenceModelParams.m_OutputBindings[i],
infoOut,
outputTensorFile,
- params.m_DequantizeOutput);
+ params.m_DequantizeOutput,
+ !params.m_DontPrintOutputs);
mapbox::util::apply_visitor(printer, outputs[j][i]);
}
diff --git a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
index e519b028a0..db8194b3f9 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
@@ -50,6 +50,7 @@ struct ExecuteNetworkParams
std::vector<std::string> m_OutputTypes;
bool m_ParseUnsupported = false;
bool m_PrintIntermediate;
+ bool m_DontPrintOutputs;
bool m_QuantizeInput;
bool m_SaveCachedNetwork;
size_t m_SimultaneousIterations;
diff --git a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
index 927d804725..b1c87d088a 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
@@ -254,6 +254,11 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork",
"Add unsupported operators as stand-in layers (where supported by parser)",
cxxopts::value<bool>(m_ExNetParams.m_ParseUnsupported)->default_value("false")->implicit_value("true"))
+ ("do-not-print-output",
+ "The default behaviour of ExecuteNetwork is to print the resulting outputs on the console. "
+ "This behaviour can be changed by adding this flag to your command.",
+ cxxopts::value<bool>(m_ExNetParams.m_DontPrintOutputs)->default_value("false")->implicit_value("true"))
+
("q,quantize-input",
"If this option is enabled, all float inputs will be quantized as appropriate for the model's inputs. "
"If unset, default to not quantized. Accepted values (true or false)",
diff --git a/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp b/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp
index 0906c1cf3f..da3188c1e7 100644
--- a/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp
+++ b/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp
@@ -116,19 +116,26 @@ std::vector<std::string> ParseStringList(const std::string& inputString, const c
TensorPrinter::TensorPrinter(const std::string& binding,
const armnn::TensorInfo& info,
const std::string& outputTensorFile,
- bool dequantizeOutput)
+ bool dequantizeOutput,
+ const bool printToConsole)
: m_OutputBinding(binding)
, m_Scale(info.GetQuantizationScale())
, m_Offset(info.GetQuantizationOffset())
, m_OutputTensorFile(outputTensorFile)
- , m_DequantizeOutput(dequantizeOutput) {}
+ , m_DequantizeOutput(dequantizeOutput)
+ , m_PrintToConsole(printToConsole) {}
void TensorPrinter::operator()(const std::vector<float>& values)
{
- ForEachValue(values, [](float value)
+ if (m_PrintToConsole)
{
- printf("%f ", value);
- });
+ std::cout << m_OutputBinding << ": ";
+ ForEachValue(values, [](float value)
+ {
+ printf("%f ", value);
+ });
+ printf("\n");
+ }
WriteToFile(values);
}
@@ -142,9 +149,19 @@ void TensorPrinter::operator()(const std::vector<uint8_t>& values)
ForEachValue(values, [&scale, &offset, &dequantizedValues](uint8_t value)
{
auto dequantizedValue = armnn::Dequantize(value, scale, offset);
- printf("%f ", dequantizedValue);
dequantizedValues.push_back(dequantizedValue);
});
+
+ if (m_PrintToConsole)
+ {
+ std::cout << m_OutputBinding << ": ";
+ ForEachValue(dequantizedValues, [](float value)
+ {
+ printf("%f ", value);
+ });
+ printf("\n");
+ }
+
WriteToFile(dequantizedValues);
}
else
@@ -156,31 +173,47 @@ void TensorPrinter::operator()(const std::vector<uint8_t>& values)
void TensorPrinter::operator()(const std::vector<int8_t>& values)
{
- ForEachValue(values, [](int8_t value)
+ if (m_PrintToConsole)
{
- printf("%d ", value);
- });
+ std::cout << m_OutputBinding << ": ";
+ ForEachValue(values, [](int8_t value)
+ {
+ printf("%d ", value);
+ });
+ printf("\n");
+ }
WriteToFile(values);
}
void TensorPrinter::operator()(const std::vector<int>& values)
{
- ForEachValue(values, [](int value)
+ if (m_PrintToConsole)
{
- printf("%d ", value);
- });
+ std::cout << m_OutputBinding << ": ";
+ ForEachValue(values, [](int value)
+ {
+ printf("%d ", value);
+ });
+ printf("\n");
+ }
WriteToFile(values);
}
template<typename Container, typename Delegate>
void TensorPrinter::ForEachValue(const Container& c, Delegate delegate)
{
- std::cout << m_OutputBinding << ": ";
+ if (m_PrintToConsole)
+ {
+ std::cout << m_OutputBinding << ": ";
+ }
for (const auto& value : c)
{
delegate(value);
}
- printf("\n");
+ if (m_PrintToConsole)
+ {
+ printf("\n");
+ }
}
template<typename T>
diff --git a/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp b/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
index d92c17c5e5..0abda4f8ee 100644
--- a/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
+++ b/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
@@ -26,7 +26,8 @@ struct TensorPrinter
TensorPrinter(const std::string& binding,
const armnn::TensorInfo& info,
const std::string& outputTensorFile,
- bool dequantizeOutput);
+ bool dequantizeOutput,
+ bool printToConsole = true);
void operator()(const std::vector<float>& values);
@@ -48,6 +49,7 @@ private:
int m_Offset;
std::string m_OutputTensorFile;
bool m_DequantizeOutput;
+ bool m_PrintToConsole;
};
using TContainer =