aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author: surmeh01 <surabhi.mehta@arm.com> 2018-03-29 16:33:54 +0100
committer: surmeh01 <surabhi.mehta@arm.com> 2018-03-29 16:33:54 +0100
commit: 7666005c72227a3ea5c410ca2861c9b6620887d8 (patch)
tree: 084296e0ba923f7885b8efb242335a4547b2cdb0
parent: 5307bc10ac488261e84ac76b2dede6039ea3fe96 (diff)
download: android-nn-driver-7666005c72227a3ea5c410ca2861c9b6620887d8.tar.gz
Release 18.03
-rw-r--r--ArmnnDriver.cpp13
-rw-r--r--README.md28
-rw-r--r--Utils.cpp39
-rw-r--r--Utils.hpp3
-rwxr-xr-xsetup.sh4
-rw-r--r--test/Android.mk3
-rw-r--r--[-rwxr-xr-x]test/Tests.cpp2
-rw-r--r--test/UtilsTests.cpp261
8 files changed, 323 insertions, 30 deletions
diff --git a/ArmnnDriver.cpp b/ArmnnDriver.cpp
index 914d6560..19624649 100644
--- a/ArmnnDriver.cpp
+++ b/ArmnnDriver.cpp
@@ -372,6 +372,19 @@ Return<ErrorStatus> ArmnnDriver::prepareModel(const Model& model,
return FailPrepareModel(ErrorStatus::GENERAL_FAILURE, message.str(), cb);
}
+ // Check that the optimized network is valid.
+ if (!optNet)
+ {
+ return FailPrepareModel(ErrorStatus::GENERAL_FAILURE,
+ "ArmnnDriver::prepareModel: Invalid optimized network", cb);
+ }
+
+ // Export the optimized network graph to a dot file if an output dump directory
+ // has been specified in the driver's arguments.
+ ExportNetworkGraphToDotFile(*optNet,
+ m_Options.GetRequestInputsAndOutputsDumpDir(),
+ model);
+
// load it into the runtime
armnn::NetworkId netId = 0;
try
diff --git a/README.md b/README.md
index f549d2c2..0dcccd78 100644
--- a/README.md
+++ b/README.md
@@ -23,36 +23,14 @@ PRODUCT_PACKAGES += android.hardware.neuralnetworks@1.0-service-armnn
</pre>
4. Build Android as normal, i.e. run `make` in `<ANDROID_ROOT>`
5. To confirm that the ArmNN driver has been built, check for driver service executable at
-<pre>
-<ANDROID_ROOT>/out/target/product/<product>/system/vendor/bin/hw/android.hardware.neuralnetworks@1.0-service-armnn
-</pre>
+`<ANDROID_ROOT>/out/target/product/<product>/system/vendor/bin/hw/android.hardware.neuralnetworks@1.0-service-armnn`
### Testing
1. Run the ArmNN driver service executable in the background
-<pre>
-adb shell /system/vendor/bin/hw/android.hardware.neuralnetworks@1.0-service-armnn &
-</pre>
+<pre>adb shell /system/vendor/bin/hw/android.hardware.neuralnetworks@1.0-service-armnn &</pre>
2. Run some code that exercises the Android Neural Networks API, for example Android's
`NeuralNetworksTest` unit tests (note this is an optional component that must be built).
-<pre>
-adb shell /data/nativetest/NeuralNetworksTest/NeuralNetworksTest > NeuralNetworkTest.log
-</pre>
+<pre>adb shell /data/nativetest/NeuralNetworksTest/NeuralNetworksTest > NeuralNetworkTest.log</pre>
3. To confirm that the ArmNN driver is being used to service the Android Neural Networks API requests,
check for messages in logcat with the `ArmnnDriver` tag.
-
-### Using ClTuner
-
-ClTuner is a feature of the Compute Library that finds optimum values for OpenCL tuning parameters. The recommended way of using it with ArmNN is to generate the tuning data during development of the Android image for a device, and use it in read-only mode during normal operation:
-
-1. Run the ArmNN driver service executable in tuning mode. The path to the tuning data must be writable by the service:
-<pre>
-adb shell /system/vendor/bin/hw/android.hardware.neuralnetworks@1.0-service-armnn --cl-tuned-parameters-file &lt;PATH_TO_TUNING_DATA&gt; --cl-tuned-parameters-mode UpdateTunedParameters &
-</pre>
-2. Run a representative set of Android NNAPI testing loads. In this mode of operation, each NNAPI workload will be slow the first time it is executed, as the tuning parameters are being selected. Subsequent executions will use the tuning data which has been generated.
-3. Stop the service.
-4. Deploy the tuned parameters file to a location readable by the ArmNN driver service (for example, to a location within /vendor/etc).
-5. During normal operation, pass the location of the tuning data to the driver service (this would normally be done by passing arguments via Android init in the service .rc definition):
-<pre>
-adb shell /system/vendor/bin/hw/android.hardware.neuralnetworks@1.0-service-armnn --cl-tuned-parameters-file &lt;PATH_TO_TUNING_DATA&gt; &
-</pre>
diff --git a/Utils.cpp b/Utils.cpp
index 33c1cd3c..01c2719b 100644
--- a/Utils.cpp
+++ b/Utils.cpp
@@ -15,6 +15,7 @@
#include <cassert>
#include <cinttypes>
#include <fstream>
+#include <iomanip>
using namespace android;
using namespace android::hidl::memory::V1_0;
@@ -270,4 +271,42 @@ void DumpTensor(const std::string& dumpDir,
}
}
+void ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork& optimizedNetwork,
+ const std::string& dumpDir,
+ const Model& model)
+{
+ // The dump directory must exist in advance.
+ if (dumpDir.empty())
+ {
+ return;
+ }
+
+ // Get the memory address of the model and convert it to an uppercase hex string (at least one digit).
+ size_t modelAddress = uintptr_t(&model);
+ std::stringstream ss;
+ ss << std::uppercase << std::hex << std::setfill('0') << std::setw(1) << modelAddress;
+ std::string modelAddressHexString = ss.str();
+
+ // Set the name of the output .dot file.
+ const std::string fileName = boost::str(boost::format("%1%/networkgraph_%2%.dot")
+ % dumpDir
+ % modelAddressHexString);
+
+ ALOGV("Exporting the optimized network graph to file: %s", fileName.c_str());
+
+ // Write the network graph to a dot file.
+ std::ofstream fileStream;
+ fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);
+
+ if (!fileStream.good())
+ {
+ ALOGW("Could not open file %s for writing", fileName.c_str());
+ return;
+ }
+
+ if (optimizedNetwork.SerializeToDot(fileStream) != armnn::Status::Success)
+ {
+ ALOGW("An error occurred when writing to file %s", fileName.c_str());
+ }
+}
} // namespace armnn_driver
diff --git a/Utils.hpp b/Utils.hpp
index 49b8b8d8..e6b56be0 100644
--- a/Utils.hpp
+++ b/Utils.hpp
@@ -48,4 +48,7 @@ void DumpTensor(const std::string& dumpDir,
const std::string& tensorName,
const armnn::ConstTensor& tensor);
+void ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork& optimizedNetwork,
+ const std::string& dumpDir,
+ const Model& model);
}
diff --git a/setup.sh b/setup.sh
index 91a75ec8..76b09b33 100755
--- a/setup.sh
+++ b/setup.sh
@@ -26,14 +26,14 @@ fi
if [ ! -d armnn ]; then
echo "++ Downloading armnn"
- git clone git@github.com:ARM-software/armnn.git armnn
+ git clone git@github.com:ARM-software/armnn armnn
AssertZeroExitCode "Cloning armnn failed"
fi
if [ ! -d clframework ]; then
echo "++ Downloading clframework"
- git clone git@github.com:ARM-software/ComputeLibrary.git clframework
+ git clone git@github.com:ARM-software/ComputeLibrary clframework
AssertZeroExitCode "Cloning clframework failed"
fi
diff --git a/test/Android.mk b/test/Android.mk
index 7a718afa..95de4617 100644
--- a/test/Android.mk
+++ b/test/Android.mk
@@ -27,7 +27,8 @@ LOCAL_CFLAGS := \
-UNDEBUG
LOCAL_SRC_FILES := \
- Tests.cpp
+ Tests.cpp \
+ UtilsTests.cpp
LOCAL_STATIC_LIBRARIES := \
libarmnn-driver \
diff --git a/test/Tests.cpp b/test/Tests.cpp
index 5f3dd6f6..0ab2908b 100755..100644
--- a/test/Tests.cpp
+++ b/test/Tests.cpp
@@ -194,8 +194,6 @@ private:
sp<IPreparedModel> m_PreparedModel;
};
-
-
// lifted from common/Utils.cpp
hidl_memory allocateSharedMemory(int64_t size)
{
diff --git a/test/UtilsTests.cpp b/test/UtilsTests.cpp
new file mode 100644
index 00000000..49941e56
--- /dev/null
+++ b/test/UtilsTests.cpp
@@ -0,0 +1,261 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// See LICENSE file in the project root for full license information.
+//
+
+#define LOG_TAG "ArmnnDriverUtilsTests"
+//#define BOOST_TEST_MODULE armnn_driver_utils_tests
+#include <boost/test/unit_test.hpp>
+#include <log/log.h>
+
+#include "../ArmnnDriver.hpp"
+#include "../Utils.hpp"
+
+#include <fstream>
+#include <iomanip>
+#include <boost/format.hpp>
+#include <armnn/INetwork.hpp>
+
+BOOST_AUTO_TEST_SUITE(UtilsTests)
+
+using namespace armnn_driver;
+using namespace android::nn;
+using namespace android;
+
+// The following are helpers for writing unit tests for the driver.
+namespace
+{
+
+struct ExportNetworkGraphFixture
+{
+public:
+ // Setup: set the output dump directory and an empty dummy model (as only its memory address is used).
+ // Defaulting the output dump directory to "/sdcard" because it should exist and be writable in all deployments.
+ ExportNetworkGraphFixture()
+ : ExportNetworkGraphFixture("/sdcard")
+ {}
+ ExportNetworkGraphFixture(const std::string& requestInputsAndOutputsDumpDir)
+ : m_RequestInputsAndOutputsDumpDir(requestInputsAndOutputsDumpDir)
+ , m_Model({})
+ , m_FileName()
+ , m_FileStream()
+ {
+ // Get the memory address of the model and convert it to an uppercase hex string (at least one digit).
+ size_t modelAddress = uintptr_t(&m_Model);
+ std::stringstream ss;
+ ss << std::uppercase << std::hex << std::setfill('0') << std::setw(1) << modelAddress;
+ std::string modelAddressHexString = ss.str();
+
+ // Set the name of the output .dot file.
+ m_FileName = boost::str(boost::format("%1%/networkgraph_%2%.dot")
+ % m_RequestInputsAndOutputsDumpDir
+ % modelAddressHexString);
+ }
+
+ // Teardown: delete the dump file regardless of the outcome of the tests.
+ ~ExportNetworkGraphFixture()
+ {
+ // Close the file stream.
+ m_FileStream.close();
+
+ // Ignore any error (such as file not found).
+ remove(m_FileName.c_str());
+ }
+
+ bool FileExists()
+ {
+ // Close any file opened in a previous session.
+ if (m_FileStream.is_open())
+ {
+ m_FileStream.close();
+ }
+
+ // Open the file.
+ m_FileStream.open(m_FileName, std::ifstream::in);
+
+ // Check that the file is open.
+ if (!m_FileStream.is_open())
+ {
+ return false;
+ }
+
+ // Check that the stream is readable.
+ return m_FileStream.good();
+ }
+
+ std::string GetFileContent()
+ {
+ // Check that the stream is readable.
+ if (!m_FileStream.good())
+ {
+ return "";
+ }
+
+ // Get all the contents of the file.
+ return std::string((std::istreambuf_iterator<char>(m_FileStream)),
+ (std::istreambuf_iterator<char>()));
+ }
+
+ std::string m_RequestInputsAndOutputsDumpDir;
+ Model m_Model;
+
+private:
+ std::string m_FileName;
+ std::ifstream m_FileStream;
+};
+
+class MockOptimizedNetwork final : public armnn::IOptimizedNetwork
+{
+public:
+ MockOptimizedNetwork(const std::string& mockSerializedContent)
+ : m_MockSerializedContent(mockSerializedContent)
+ {}
+ ~MockOptimizedNetwork() {}
+
+ armnn::Status PrintGraph() override { return armnn::Status::Failure; }
+ armnn::Status SerializeToDot(std::ostream& stream) const override
+ {
+ stream << m_MockSerializedContent;
+
+ return stream.good() ? armnn::Status::Success : armnn::Status::Failure;
+ }
+
+ void UpdateMockSerializedContent(const std::string& mockSerializedContent)
+ {
+ this->m_MockSerializedContent = mockSerializedContent;
+ }
+
+private:
+ std::string m_MockSerializedContent;
+};
+
+} // namespace
+
+BOOST_AUTO_TEST_CASE(ExportToEmptyDirectory)
+{
+ // Set the fixture for this test.
+ ExportNetworkGraphFixture fixture("");
+
+ // Set a mock content for the optimized network.
+ std::string mockSerializedContent = "This is a mock serialized content.";
+
+ // Set a mock optimized network.
+ MockOptimizedNetwork mockOptimizedNetwork(mockSerializedContent);
+
+ // Export the mock optimized network.
+ armnn_driver::ExportNetworkGraphToDotFile(mockOptimizedNetwork,
+ fixture.m_RequestInputsAndOutputsDumpDir,
+ fixture.m_Model);
+
+ // Check that the output file does not exist.
+ BOOST_TEST(!fixture.FileExists());
+}
+
+BOOST_AUTO_TEST_CASE(ExportNetwork)
+{
+ // Set the fixture for this test.
+ ExportNetworkGraphFixture fixture;
+
+ // Set a mock content for the optimized network.
+ std::string mockSerializedContent = "This is a mock serialized content.";
+
+ // Set a mock optimized network.
+ MockOptimizedNetwork mockOptimizedNetwork(mockSerializedContent);
+
+ // Export the mock optimized network.
+ armnn_driver::ExportNetworkGraphToDotFile(mockOptimizedNetwork,
+ fixture.m_RequestInputsAndOutputsDumpDir,
+ fixture.m_Model);
+
+ // Check that the output file exists and that it has the correct name.
+ BOOST_TEST(fixture.FileExists());
+
+ // Check that the content of the output file matches the mock content.
+ BOOST_TEST(fixture.GetFileContent() == mockSerializedContent);
+}
+
+BOOST_AUTO_TEST_CASE(ExportNetworkOverwriteFile)
+{
+ // Set the fixture for this test.
+ ExportNetworkGraphFixture fixture;
+
+ // Set a mock content for the optimized network.
+ std::string mockSerializedContent = "This is a mock serialized content.";
+
+ // Set a mock optimized network.
+ MockOptimizedNetwork mockOptimizedNetwork(mockSerializedContent);
+
+ // Export the mock optimized network.
+ armnn_driver::ExportNetworkGraphToDotFile(mockOptimizedNetwork,
+ fixture.m_RequestInputsAndOutputsDumpDir,
+ fixture.m_Model);
+
+ // Check that the output file exists and that it has the correct name.
+ BOOST_TEST(fixture.FileExists());
+
+ // Check that the content of the output file matches the mock content.
+ BOOST_TEST(fixture.GetFileContent() == mockSerializedContent);
+
+ // Update the mock serialized content of the network.
+ mockSerializedContent = "This is ANOTHER mock serialized content!";
+ mockOptimizedNetwork.UpdateMockSerializedContent(mockSerializedContent);
+
+ // Export the mock optimized network.
+ armnn_driver::ExportNetworkGraphToDotFile(mockOptimizedNetwork,
+ fixture.m_RequestInputsAndOutputsDumpDir,
+ fixture.m_Model);
+
+ // Check that the output file still exists and that it has the correct name.
+ BOOST_TEST(fixture.FileExists());
+
+ // Check that the content of the output file matches the mock content.
+ BOOST_TEST(fixture.GetFileContent() == mockSerializedContent);
+}
+
+BOOST_AUTO_TEST_CASE(ExportMultipleNetworks)
+{
+ // Set the fixtures for this test.
+ ExportNetworkGraphFixture fixture1;
+ ExportNetworkGraphFixture fixture2;
+ ExportNetworkGraphFixture fixture3;
+
+ // Set a mock content for the optimized network.
+ std::string mockSerializedContent = "This is a mock serialized content.";
+
+ // Set a mock optimized network.
+ MockOptimizedNetwork mockOptimizedNetwork(mockSerializedContent);
+
+ // Export the mock optimized network.
+ armnn_driver::ExportNetworkGraphToDotFile(mockOptimizedNetwork,
+ fixture1.m_RequestInputsAndOutputsDumpDir,
+ fixture1.m_Model);
+
+ // Check that the output file exists and that it has the correct name.
+ BOOST_TEST(fixture1.FileExists());
+
+ // Check that the content of the output file matches the mock content.
+ BOOST_TEST(fixture1.GetFileContent() == mockSerializedContent);
+
+ // Export the mock optimized network.
+ armnn_driver::ExportNetworkGraphToDotFile(mockOptimizedNetwork,
+ fixture2.m_RequestInputsAndOutputsDumpDir,
+ fixture2.m_Model);
+
+ // Check that the output file exists and that it has the correct name.
+ BOOST_TEST(fixture2.FileExists());
+
+ // Check that the content of the output file matches the mock content.
+ BOOST_TEST(fixture2.GetFileContent() == mockSerializedContent);
+
+ // Export the mock optimized network.
+ armnn_driver::ExportNetworkGraphToDotFile(mockOptimizedNetwork,
+ fixture3.m_RequestInputsAndOutputsDumpDir,
+ fixture3.m_Model);
+ // Check that the output file exists and that it has the correct name.
+ BOOST_TEST(fixture3.FileExists());
+
+ // Check that the content of the output file matches the mock content.
+ BOOST_TEST(fixture3.GetFileContent() == mockSerializedContent);
+}
+
+BOOST_AUTO_TEST_SUITE_END()