author     Colm Donelan <colm.donelan@arm.com>  2022-04-19 16:21:59 +0100
committer  Colm Donelan <colm.donelan@arm.com>  2022-04-21 08:42:26 +0000
commit     643355be8a8e94edb66e4586b406343d4342382b (patch)
tree       64ccd3c8eea55614eee3e70f58c85736413aff90
parent     78d768c929a337b02b23be76571c651ac8e0be02 (diff)
download   armnn-643355be8a8e94edb66e4586b406343d4342382b.tar.gz
IVGCVSW-6755 Integrate DriverOptions to arm-armnn-sl
* Modify DriverOptions to parse an environment variable.
* Update support library service to use the new driver options.
* Added minimal README file to describe usage.

Signed-off-by: Colm Donelan <colm.donelan@arm.com>
Change-Id: Ifc2847a32bf8009c61df4465accd1b1df31f45db
-rw-r--r--  shim/sl/README.md                    38
-rw-r--r--  shim/sl/canonical/ArmnnDevice.cpp    14
-rw-r--r--  shim/sl/canonical/DriverOptions.cpp  59
-rw-r--r--  shim/sl/canonical/DriverOptions.hpp   2
-rw-r--r--  shim/sl/support_library_service.cpp   5
5 files changed, 93 insertions, 25 deletions
diff --git a/shim/sl/README.md b/shim/sl/README.md
new file mode 100644
index 0000000000..46509656f7
--- /dev/null
+++ b/shim/sl/README.md
@@ -0,0 +1,38 @@
+# Arm NN Support Library Neural Networks driver
+
+This directory contains the Arm NN Support Library for the Android Neural Networks API.
+
+# Passing parameters to the support library runtime.
+
+The support library inherits its parameters from the Arm NN Android Neural Networks driver. Parameters are passed to it through an environment variable, ARMNN_SL_OPTIONS. A full list of parameters is available in ./canonical/DriverOptions.cpp.
+
+# Sample usage
+
+## Running NeuralNetworksSupportLibraryTest
+
+This test suite takes as its first argument the path to a shared object implementation of the support library. Any library dependencies should be resolvable through the LD_LIBRARY_PATH mechanism. Setting ARMNN_SL_OPTIONS will pass parameters to the Arm NN Support Library Neural Networks driver.
+
+Here we assume that Bash is the current shell and specify "-v" to enable verbose logging and "-c CpuAcc" to direct that the Neon(TM) accelerator be used.
+~~~
+ARMNN_SL_OPTIONS="-v -c CpuAcc" ./NeuralNetworksSupportLibraryTest ./libarmnn_support_library.so
+~~~
+
+## Running TfLite Benchmarking tool
+
+This tool's parameters are described [here](https://www.tensorflow.org/lite/performance/measurement). The support-library-specific parts are specifying the path to the library and ensuring that ARMNN_SL_OPTIONS is set in the environment.
+
+Here we again assume that Bash is the current shell and specify "-v" to enable verbose logging, "-c GpuAcc" to direct that the GPU accelerator be used, and "-f" to enable "support for relaxed computation from Float32 to Float16".
+~~~
+ARMNN_SL_OPTIONS="-v -c GpuAcc -f" ./android_aarch64_benchmark_model --graph=./mymodel.tflite --num_threads=1 --use_nnapi=true --num_runs=1 --nnapi_support_library_path=./libarmnn_support_library.so --nnapi_accelerator_name=arm-armnn-sl
+~~~
+
+### License
+
+The Arm NN Support Library Neural Networks driver is provided under the [MIT](https://spdx.org/licenses/MIT.html) license.
+See [LICENSE](LICENSE) for more information. Contributions to this project are accepted under the same license.
+
+Individual files contain the following tag instead of the full license text.
+
+ SPDX-License-Identifier: MIT
+
+This enables machine processing of license information based on the SPDX License Identifiers that are available here: http://spdx.org/licenses/
diff --git a/shim/sl/canonical/ArmnnDevice.cpp b/shim/sl/canonical/ArmnnDevice.cpp
index 2fc7ff1d12..3fd192630b 100644
--- a/shim/sl/canonical/ArmnnDevice.cpp
+++ b/shim/sl/canonical/ArmnnDevice.cpp
@@ -40,6 +40,20 @@ ArmnnDevice::ArmnnDevice(DriverOptions options)
, m_ClTunedParameters(nullptr)
, m_Options(std::move(options))
{
+ // First check if the DriverOptions is happy.
+ if (options.ShouldExit())
+ {
+ // Is this a good or bad exit?
+ if (options.GetExitCode() != EXIT_SUCCESS)
+ {
+ throw armnn::InvalidArgumentException("ArmnnDevice: Insufficient or illegal options specified.");
+ }
+ else
+ {
+ throw armnn::InvalidArgumentException("ArmnnDevice: Nothing to do.");
+ }
+ }
+
initVLogMask();
VLOG(DRIVER) << "ArmnnDevice::ArmnnDevice()";
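The added guard means that options requesting an early exit (a help or version request, or an option-parsing failure) now surface from the ArmnnDevice constructor as an armnn::InvalidArgumentException instead of terminating the process. Below is a minimal sketch of how a caller could absorb that; the CreateDeviceOrNull helper is hypothetical, and it assumes ArmnnDevice is directly constructible and that the namespace qualification is in scope where this code runs.
~~~
// Hypothetical caller-side handling of the new early-exit exception.
// Assumes the headers resolve relative to shim/sl/canonical/ and that any
// required namespace (e.g. armnn_driver) is already opened or imported.
#include "ArmnnDevice.hpp"
#include "DriverOptions.hpp"
#include <armnn/Exceptions.hpp>
#include <iostream>
#include <memory>

std::shared_ptr<ArmnnDevice> CreateDeviceOrNull(DriverOptions options)
{
    try
    {
        // Throws when the parsed options asked to exit (help/version)
        // or when they were insufficient or illegal.
        return std::make_shared<ArmnnDevice>(std::move(options));
    }
    catch (const armnn::InvalidArgumentException& e)
    {
        std::cerr << "ArmnnDevice not created: " << e.what() << std::endl;
        return nullptr;
    }
}
~~~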
diff --git a/shim/sl/canonical/DriverOptions.cpp b/shim/sl/canonical/DriverOptions.cpp
index 3e26959dc1..5c73edfaa6 100644
--- a/shim/sl/canonical/DriverOptions.cpp
+++ b/shim/sl/canonical/DriverOptions.cpp
@@ -71,7 +71,10 @@ DriverOptions::DriverOptions(const std::vector<armnn::BackendId>& backends, bool
{
}
-DriverOptions::DriverOptions(int argc, char** argv)
+// This default constructor will examine an environment variable called
+// ARMNN_SL_OPTIONS. It will parse the parameters using the existing
+// cxxopts mechanism.
+DriverOptions::DriverOptions()
: m_VerboseLogging(false)
, m_RequestInputsAndOutputsDumpDir(std::string(""))
, m_ServiceName(std::string("armnn_sl"))
@@ -91,11 +94,37 @@ DriverOptions::DriverOptions(int argc, char** argv)
std::string clTunedParametersModeAsString;
std::string clTuningLevelAsString;
std::vector<std::string> backends;
- bool showHelp;
- bool showVersion;
+ bool showHelp = false;
+ bool showVersion = false;
- cxxopts::Options optionsDesc(argv[0], "ArmNN Android NN driver for the Android Neural Networks API."
- "The Android NN driver will convert Android NNAPI requests "
+ const char* rawEnv = std::getenv("ARMNN_SL_OPTIONS");
+ // If the environment variable isn't set we'll continue as if it were an empty string.
+ if (!rawEnv)
+ {
+ rawEnv = "";
+ }
+ string optionsAsString(rawEnv);
+ regex whiteSpaceRegex("\\s+");
+    // Tokenize the string based on whitespace.
+ sregex_token_iterator iter(optionsAsString.begin(), optionsAsString.end(), whiteSpaceRegex, -1);
+ sregex_token_iterator end;
+ vector<string> cliAsVector(iter, end);
+ // As we're pretending to be a command line, argv[0] should be an executable name.
+ cliAsVector.insert(cliAsVector.begin(), "ARMNN_SL_OPTIONS");
+ // Convert the vector of string to a vector of char* backed by the existing vector.
+ std::vector<char*> argVector;
+ for (const auto& arg : cliAsVector)
+ {
+ argVector.push_back((char*)arg.data());
+ }
+ // Terminate the array.
+ argVector.push_back(nullptr);
+ // Create usable variables.
+ int argc = argVector.size() - 1; // Ignore the null pointer at the end.
+ char** argv = argVector.data();
+
+ cxxopts::Options optionsDesc(argv[0], "Arm NN Support Library for the Android Neural Networks API."
+ "The support library will convert Android NNAPI requests "
"and delegate them to available ArmNN backends.");
try
{
@@ -118,7 +147,7 @@ DriverOptions::DriverOptions(int argc, char** argv)
cxxopts::value<bool>(m_fp16Enabled)->default_value("false"))
("h,help", "Show this help",
- cxxopts::value<bool>(showHelp)->default_value("false"))
+ cxxopts::value<bool>(showHelp)->default_value("false")->implicit_value("true"))
("m,cl-tuned-parameters-mode",
"If 'UseTunedParameters' (the default), will read CL tuned parameters from the file specified by "
@@ -131,10 +160,6 @@ DriverOptions::DriverOptions(int argc, char** argv)
"If non-empty, the given file will be used to load/save MLGO CL tuned parameters. ",
cxxopts::value<std::string>(m_ClMLGOTunedParametersFile)->default_value(""))
- ("n,service-name",
- "If non-empty, the driver service name to be registered",
- cxxopts::value<std::string>(m_ServiceName)->default_value("armnn_sl"))
-
("o,cl-tuning-level",
"exhaustive: all lws values are tested "
"normal: reduced number of lws values but enough to still have the performance really close to the "
@@ -173,19 +198,11 @@ DriverOptions::DriverOptions(int argc, char** argv)
cxxopts::value<std::string>(unsupportedOperationsAsString)->default_value(""))
("v,verbose-logging", "Turns verbose logging on",
- cxxopts::value<bool>(m_VerboseLogging)->default_value("false"))
+ cxxopts::value<bool>(m_VerboseLogging)->default_value("false")->implicit_value("true"))
("V,version", "Show version information",
- cxxopts::value<bool>(showVersion)->default_value("false"))
-
- ("A,asyncModelExecution", "Enable AsynModel Execution",
- cxxopts::value<bool>(m_EnableAsyncModelExecution)->default_value("false"))
-
- ("T,armnn-threads",
- "Assign the number of threads used by ArmNN. "
- "Input value must be at least 1. "
- "Default is set to 1.",
- cxxopts::value<unsigned int>(m_ArmnnNumberOfThreads)->default_value("1"));
+ cxxopts::value<bool>(showVersion)->default_value("false")->implicit_value("true"))
+ ;
}
catch (const std::exception& e)
{
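The core of the change is how DriverOptions() turns the contents of ARMNN_SL_OPTIONS into a synthetic argc/argv for cxxopts: read the variable, split it on whitespace with a regex token iterator, and prepend a placeholder argv[0]. The following is a standalone sketch of just that tokenization step; the TokenizeEnvOptions helper and the main() harness are illustrative, not part of the patch.
~~~
#include <cstdlib>
#include <iostream>
#include <regex>
#include <string>
#include <vector>

// Illustrative helper mirroring the approach above: build an argv-style
// token list from an environment variable.
std::vector<std::string> TokenizeEnvOptions(const char* variableName)
{
    const char* rawEnv = std::getenv(variableName);
    // Treat an unset variable as an empty option string.
    std::string optionsAsString(rawEnv ? rawEnv : "");

    // Split on runs of whitespace; submatch -1 selects the text between matches.
    std::regex whiteSpaceRegex("\\s+");
    std::sregex_token_iterator iter(optionsAsString.begin(), optionsAsString.end(), whiteSpaceRegex, -1);
    std::vector<std::string> tokens(iter, std::sregex_token_iterator());

    // Pretend to be a command line: element 0 plays the role of argv[0].
    tokens.insert(tokens.begin(), variableName);
    return tokens;
}

int main()
{
    setenv("ARMNN_SL_OPTIONS", "-v -c CpuAcc", 1); // POSIX setenv
    for (const auto& token : TokenizeEnvOptions("ARMNN_SL_OPTIONS"))
    {
        std::cout << token << "\n"; // ARMNN_SL_OPTIONS, -v, -c, CpuAcc
    }
    return 0;
}
~~~
Reusing cxxopts in this way keeps the ARMNN_SL_OPTIONS syntax identical to the command-line syntax the Android NN driver already used, so the existing option descriptions in DriverOptions.cpp still apply.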
diff --git a/shim/sl/canonical/DriverOptions.hpp b/shim/sl/canonical/DriverOptions.hpp
index 6b42f4ecca..4c6b385a48 100644
--- a/shim/sl/canonical/DriverOptions.hpp
+++ b/shim/sl/canonical/DriverOptions.hpp
@@ -19,7 +19,7 @@ class DriverOptions
public:
DriverOptions(armnn::Compute computeDevice, bool fp16Enabled = false);
DriverOptions(const std::vector<armnn::BackendId>& backends, bool fp16Enabled = false);
- DriverOptions(int argc, char** argv);
+ DriverOptions();
DriverOptions(DriverOptions&& other) = default;
diff --git a/shim/sl/support_library_service.cpp b/shim/sl/support_library_service.cpp
index 6b8167aeaa..14556fd573 100644
--- a/shim/sl/support_library_service.cpp
+++ b/shim/sl/support_library_service.cpp
@@ -12,8 +12,7 @@ namespace android::nn
std::vector<SharedDevice> getDevices()
{
- std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc, armnn::Compute::CpuRef, armnn::Compute::GpuAcc };
- return { std::make_shared<armnn_driver::ArmnnDriver>(DriverOptions(backends)) };
+ return { std::make_shared<armnn_driver::ArmnnDriver>(DriverOptions()) };
}
-} // namespace android::nn \ No newline at end of file
+} // namespace android::nn
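Because getDevices() now builds the driver from the default DriverOptions constructor, backend selection and the other settings are controlled entirely by ARMNN_SL_OPTIONS rather than the previously hardcoded backend list. A hedged usage sketch follows; the include path, the armnn_driver::DriverOptions qualification, and the test-style main() are assumptions for illustration, not part of the service.
~~~
#include <cstdlib>
#include <memory>
#include "canonical/ArmnnDriver.hpp" // include path assumed

int main()
{
    // Select verbose logging and the CpuAcc backend through the environment
    // (POSIX setenv), replacing the previously hardcoded backend vector.
    setenv("ARMNN_SL_OPTIONS", "-v -c CpuAcc", 1);

    // DriverOptions() reads and parses ARMNN_SL_OPTIONS in its constructor,
    // exactly as getDevices() above will when the service is loaded.
    auto driver = std::make_shared<armnn_driver::ArmnnDriver>(armnn_driver::DriverOptions());
    return driver ? 0 : 1;
}
~~~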