Diffstat (limited to 'tests/ExecuteNetwork')
-rw-r--r--  tests/ExecuteNetwork/ArmNNExecutor.cpp                 | 155
-rw-r--r--  tests/ExecuteNetwork/ExecuteNetworkParams.hpp          |   3
-rw-r--r--  tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp  |  12
3 files changed, 160 insertions, 10 deletions
diff --git a/tests/ExecuteNetwork/ArmNNExecutor.cpp b/tests/ExecuteNetwork/ArmNNExecutor.cpp
index 4518f1426f..ece3dafea4 100644
--- a/tests/ExecuteNetwork/ArmNNExecutor.cpp
+++ b/tests/ExecuteNetwork/ArmNNExecutor.cpp
@@ -7,27 +7,157 @@
#include "ArmNNExecutor.hpp"
#include "NetworkExecutionUtils/NetworkExecutionUtils.hpp"
-#include <armnn/IAsyncExecutionCallback.hpp>
#include <AsyncExecutionCallback.hpp>
-
-
+#include <armnn/IAsyncExecutionCallback.hpp>
+#if defined(ARMNN_SERIALIZER)
+#include <armnnSerializer/ISerializer.hpp>
+#endif
using namespace armnn;
using namespace std::chrono;
+#if defined(ARMNN_SERIALIZER)
+/**
+ * Given a reference to an INetwork and a target directory, serialize the network to a file
+ * called "<timestamp>_network.armnn"
+ *
+ * @param network The network to serialize.
+ * @param dumpDir The target directory.
+ * @return the full path to the serialized file.
+ */
+std::string SerializeNetwork(const armnn::INetwork& network, const std::string& dumpDir)
+{
+ if (dumpDir.empty())
+ {
+ throw InvalidArgumentException("An output directory must be specified.");
+ }
+ fs::path outputDirectory(dumpDir);
+ if (!exists(outputDirectory))
+ {
+ throw InvalidArgumentException(
+ fmt::format("The specified directory does not exist: {}", outputDirectory.c_str()));
+ }
+ auto serializer(armnnSerializer::ISerializer::Create());
+ // Serialize the Network
+ serializer->Serialize(network);
+
+ fs::path fileName;
+ fileName += dumpDir;
+    // Use a monotonic timestamp to generate a unique name for the serialized network file.
+ timespec ts;
+ if (clock_gettime(CLOCK_MONOTONIC_RAW, &ts) == 0)
+ {
+ std::stringstream ss;
+ ss << std::to_string(ts.tv_sec) << "_" << std::to_string(ts.tv_nsec) << "_network.armnn";
+ fileName += ss.str();
+ }
+ else
+ {
+ // This is incredibly unlikely but just in case.
+        throw RuntimeException("clock_gettime with CLOCK_MONOTONIC_RAW returned a non-zero result.");
+ }
+
+ // Save serialized network to a file
+ std::ofstream serializedFile(fileName, std::ios::out | std::ios::binary);
+ auto serialized = serializer->SaveSerializedToStream(serializedFile);
+ if (!serialized)
+ {
+        throw RuntimeException(fmt::format("An error occurred when serializing to file {}", fileName.c_str()));
+ }
+ serializedFile.flush();
+ serializedFile.close();
+ return fileName;
+}
+
+/**
+ * Given a reference to an optimized network and a target directory, serialize the network in .dot file format to
+ * a file called "<timestamp>_optimized_networkgraph.dot"
+ *
+ * @param optimizedNetwork The optimized network to serialize.
+ * @param dumpDir The target directory.
+ * @return the full path to the serialized file.
+ */
+std::string SerializeNetworkToDotFile(const armnn::IOptimizedNetwork& optimizedNetwork, const std::string& dumpDir)
+{
+ if (dumpDir.empty())
+ {
+ throw InvalidArgumentException("An output directory must be specified.");
+ }
+ fs::path outputDirectory(dumpDir);
+ if (!exists(outputDirectory))
+ {
+ throw InvalidArgumentException(
+ fmt::format("The specified directory does not exist: {}", outputDirectory.c_str()));
+ }
+
+ fs::path fileName;
+ fileName += dumpDir;
+    // Use a monotonic timestamp to generate a unique name for the .dot graph file.
+ timespec ts;
+ if (clock_gettime(CLOCK_MONOTONIC_RAW, &ts) == 0)
+ {
+ std::stringstream ss;
+ ss << std::to_string(ts.tv_sec) << "_" << std::to_string(ts.tv_nsec) << "_optimized_networkgraph.dot";
+ fileName += ss.str();
+ }
+ else
+ {
+ // This is incredibly unlikely but just in case.
+        throw RuntimeException("clock_gettime with CLOCK_MONOTONIC_RAW returned a non-zero result.");
+ }
+
+ // Write the network graph to a dot file.
+ std::ofstream fileStream;
+ fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);
+ if (!fileStream.good())
+ {
+        throw RuntimeException(fmt::format("An error occurred when creating {}", fileName.c_str()));
+ }
+
+ if (optimizedNetwork.SerializeToDot(fileStream) != armnn::Status::Success)
+ {
+        throw RuntimeException(fmt::format("An error occurred when serializing to file {}", fileName.c_str()));
+ }
+ fileStream.flush();
+ fileStream.close();
+ return fileName;
+}
+#endif
+
ArmNNExecutor::ArmNNExecutor(const ExecuteNetworkParams& params, armnn::IRuntime::CreationOptions runtimeOptions)
-: m_Params(params)
+ : m_Params(params)
{
- runtimeOptions.m_EnableGpuProfiling = params.m_EnableProfiling;
+ runtimeOptions.m_EnableGpuProfiling = params.m_EnableProfiling;
runtimeOptions.m_DynamicBackendsPath = params.m_DynamicBackendsPath;
// Create/Get the static ArmNN Runtime. Note that the m_Runtime will be shared by all ArmNNExecutor
// instances so the RuntimeOptions cannot be altered for different ArmNNExecutor instances.
m_Runtime = GetRuntime(runtimeOptions);
- auto parser = CreateParser();
+ auto parser = CreateParser();
auto network = parser->CreateNetwork(m_Params);
- auto optNet = OptimizeNetwork(network.get());
+ auto optNet = OptimizeNetwork(network.get());
+    // If the user has asked for it, write out the .armnn and .dot files.
+ if (params.m_SerializeToArmNN)
+ {
+#if defined(ARMNN_SERIALIZER)
+ // .armnn first.
+ // This could throw multiple exceptions if the directory cannot be created or the file cannot be written.
+ std::string targetDirectory(armnnUtils::Filesystem::CreateDirectory("/ArmNNSerializeNetwork"));
+        std::string fileName = SerializeNetwork(*network, targetDirectory);
+        ARMNN_LOG(info) << "The pre-optimized network has been serialized to: " << fileName;
+ // and the .dot file.
+ // Most of the possible exceptions should have already occurred with the .armnn file.
+        fileName = SerializeNetworkToDotFile(*optNet, targetDirectory);
+        ARMNN_LOG(info) << "The optimized network has been serialized to: " << fileName;
+#else
+ ARMNN_LOG(info) << "Arm NN has not been built with ARMNN_SERIALIZER enabled.";
+#endif
+ }
m_IOInfo = GetIOInfo(optNet.get());
armnn::ProfilingDetailsMethod profilingDetailsMethod = ProfilingDetailsMethod::Undefined;
@@ -97,6 +227,9 @@ ArmNNExecutor::~ArmNNExecutor()
{
profiler->Print(std::cout);
}
+
+ // We're finished with the network.
+ m_Runtime->UnloadNetwork(m_NetworkId);
}
void ArmNNExecutor::ExecuteAsync()
@@ -176,6 +309,12 @@ void ArmNNExecutor::ExecuteAsync()
void ArmNNExecutor::ExecuteSync()
{
+ // If we've only been asked to serialize the networks, don't execute the inference.
+ if (m_Params.m_SerializeToArmNN)
+ {
+ ARMNN_LOG(info) << "serialize-to-armnn has been specified. No inference will be executed.";
+ return;
+ }
for (size_t x = 0; x < m_Params.m_Iterations; x++)
{
std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkId);
@@ -800,6 +939,7 @@ armnn::BindingPointInfo ArmNNExecutor::TfliteParser::GetOutputBindingPointInfo(s
#if defined(ARMNN_ONNX_PARSER)
+ARMNN_NO_DEPRECATE_WARN_BEGIN
ArmNNExecutor::OnnxParser::OnnxParser() : m_Parser(armnnOnnxParser::IOnnxParser::Create()){}
armnn::INetworkPtr ArmNNExecutor::OnnxParser::CreateNetwork(const ExecuteNetworkParams& params)
@@ -843,4 +983,5 @@ armnn::BindingPointInfo ArmNNExecutor::OnnxParser::GetOutputBindingPointInfo(siz
{
return m_Parser->GetNetworkOutputBindingInfo(outputName);
}
+ARMNN_NO_DEPRECATE_WARN_END
#endif
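The two helpers added above rely only on the public armnnSerializer interface pulled in through <armnnSerializer/ISerializer.hpp>: Create() builds a serializer, Serialize() captures the INetwork, and SaveSerializedToStream() writes the serialized data out. A minimal sketch of that flow in isolation, assuming a caller-chosen path; the helper name, path handling and error type here are illustrative, not part of the patch:

    #include <armnn/INetwork.hpp>
    #include <armnnSerializer/ISerializer.hpp>
    #include <fstream>
    #include <stdexcept>
    #include <string>

    // Hypothetical helper: dump any armnn::INetwork to a caller-chosen file path.
    void DumpNetwork(const armnn::INetwork& network, const std::string& path)
    {
        auto serializer = armnnSerializer::ISerializer::Create();
        serializer->Serialize(network);                     // capture the graph
        std::ofstream file(path, std::ios::out | std::ios::binary);
        if (!serializer->SaveSerializedToStream(file))      // write the serialized data
        {
            throw std::runtime_error("Failed to serialize network to " + path);
        }
    }

Unlike SerializeNetwork() in the patch, this sketch leaves timestamping and directory validation to the caller.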
diff --git a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
index ffcb4f482c..c2bfb951d5 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
@@ -1,5 +1,5 @@
//
-// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
+// Copyright © 2022, 2024 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
@@ -67,6 +67,7 @@ struct ExecuteNetworkParams
std::string m_ComparisonFile;
std::vector<armnn::BackendId> m_ComparisonComputeDevices;
bool m_CompareWithTflite;
+ bool m_SerializeToArmNN;
// Ensures that the parameters for ExecuteNetwork fit together
void ValidateParams();
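ValidateParams(), declared just above, is where incompatible option combinations are normally rejected, and the new m_SerializeToArmNN flag is the kind of parameter such a check could cover. A hedged sketch of what a guard might look like, based only on the constraint stated in the --serialize-to-armnn help text; this is an assumed illustration with a hypothetical function name, not the validation the patch actually performs:

    #include <armnn/Exceptions.hpp>
    #include "ExecuteNetworkParams.hpp"

    // Hypothetical guard: the help text says the option needs the Arm NN serializer
    // in the build, so a build without ARMNN_SERIALIZER could reject it up front.
    void CheckSerializeToArmNN(const ExecuteNetworkParams& params)
    {
    #if !defined(ARMNN_SERIALIZER)
        if (params.m_SerializeToArmNN)
        {
            throw armnn::InvalidArgumentException(
                "--serialize-to-armnn requires a build with ARMNN_SERIALIZER enabled.");
        }
    #else
        (void)params; // nothing to check when the serializer is available
    #endif
    }

The patch itself takes the softer route of logging an info message in the ArmNNExecutor constructor when ARMNN_SERIALIZER is not defined.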
diff --git a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
index 5c1337f769..87b38c5f78 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
@@ -216,7 +216,8 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork",
("m,model-path",
"Path to model file, e.g. .armnn, .tflite, .onnx. "
- "DEPRECATED: .pb and .prototxt model files no longer load and are deprecated.",
+         "DEPRECATED: .pb and .prototxt model files are no longer loaded and are deprecated. "
+         "DEPRECATED: .onnx model files will no longer be loaded from 24.08 onwards.",
cxxopts::value<std::string>(m_ExNetParams.m_ModelPath));
m_CxxOptions.add_options("b) Ordering")
@@ -237,7 +238,8 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork",
cxxopts::value<std::string>(m_RuntimeOptions.m_DynamicBackendsPath))
("P, thread-pool-size",
- "Run the network using the Arm NN thread pool with the number of threads provided. ",
+ "Run the network using the Arm NN thread pool with the number of threads provided. "
+         "DEPRECATED: The asynchronous execution interface will be removed in 24.08.",
cxxopts::value<size_t>(m_ExNetParams.m_ThreadPoolSize)->default_value("0"))
("d,input-tensor-data",
@@ -351,6 +353,12 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork",
"Perform an per byte root mean square error calculation of the output of the inference with"
" the tflite ref model.",
cxxopts::value<bool>(m_ExNetParams.m_CompareWithTflite)->default_value("false")
+ ->implicit_value("true"))
+ ("serialize-to-armnn",
+         "Serialize the loaded network to an .armnn file. This option will also serialize the optimized network"
+         " in .dot format. It requires both the TfLite parser and the Arm NN serializer to be enabled"
+         " in the build. An inference will NOT be executed.",
+ cxxopts::value<bool>(m_ExNetParams.m_SerializeToArmNN)->default_value("false")
->implicit_value("true"));
m_CxxOptions.add_options("d) Optimization")
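The new option reuses the same cxxopts pattern as compare-with-tflite directly above it: a bool bound to the parameter struct, defaulting to "false" and switching to "true" when the flag is given with no argument, so passing --serialize-to-armnn on its own alongside the usual model options is all that is needed to set the flag. A standalone sketch of that pattern, assuming an illustrative option and variable name rather than the real ExecuteNetwork wiring:

    #include <cxxopts.hpp>
    #include <iostream>

    int main(int argc, char* argv[])
    {
        bool serialize = false;
        cxxopts::Options options("example", "Boolean flag with default and implicit values");
        options.add_options()
            ("serialize", "Serialize the network instead of running an inference.",
             cxxopts::value<bool>(serialize)->default_value("false")->implicit_value("true"));

        options.parse(argc, argv);
        // "--serialize" on its own sets the bound bool to true; omitting it leaves it false.
        std::cout << std::boolalpha << serialize << std::endl;
        return 0;
    }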