aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAron Virginas-Tar <Aron.Virginas-Tar@arm.com>2018-10-23 15:14:46 +0100
committerMatthew Bentham <matthew.bentham@arm.com>2018-10-25 09:49:58 +0100
commit5cc8e56b4ca8d58dc11973c49c10a02a2f13580c (patch)
tree99f67a0652d2ee27f6cdbdc21c39001e529ee4f1
parent8853c1f6a802fe7549b89b74ed850aec455b923a (diff)
downloadarmnn-5cc8e56b4ca8d58dc11973c49c10a02a2f13580c.tar.gz
IVGCVSW-2053: Validate BackendId parameters for the InferenceTest (EndToEnd tests) / ExecuteNetwork
!referencetests:154790 Change-Id: Iaf9608ae1e66460079f2546b30d7fbc9b55bb7dd
-rw-r--r--include/armnn/ArmNN.hpp1
-rw-r--r--src/backends/RegistryCommon.hpp19
-rw-r--r--tests/CMakeLists.txt8
-rw-r--r--tests/ExecuteNetwork/ExecuteNetwork.cpp33
-rw-r--r--tests/InferenceModel.hpp50
-rw-r--r--tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp14
6 files changed, 105 insertions, 20 deletions
diff --git a/include/armnn/ArmNN.hpp b/include/armnn/ArmNN.hpp
index 6fe8b188e0..f03b79d1ca 100644
--- a/include/armnn/ArmNN.hpp
+++ b/include/armnn/ArmNN.hpp
@@ -11,6 +11,7 @@
#include "INetwork.hpp"
#include "LayerSupport.hpp"
#include "LstmParams.hpp"
+#include "Optional.hpp"
#include "Tensor.hpp"
#include "Types.hpp"
#include "TypesUtils.hpp"
diff --git a/src/backends/RegistryCommon.hpp b/src/backends/RegistryCommon.hpp
index 27663b6dea..616a63bd53 100644
--- a/src/backends/RegistryCommon.hpp
+++ b/src/backends/RegistryCommon.hpp
@@ -8,6 +8,8 @@
#include <armnn/Exceptions.hpp>
#include <functional>
#include <memory>
+#include <sstream>
+#include <string>
#include <unordered_map>
namespace armnn
@@ -65,6 +67,23 @@ public:
return result;
}
+ std::string GetBackendIdsAsString() const
+ {
+ static const std::string delimitator = ", ";
+
+ std::stringstream output;
+ for (auto& backendId : GetBackendIds())
+ {
+ if (output.tellp() != std::streampos(0))
+ {
+ output << delimitator;
+ }
+ output << backendId;
+ }
+
+ return output.str();
+ }
+
RegistryCommon() {}
virtual ~RegistryCommon() {}
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 0979d552de..50e24c9042 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -14,11 +14,13 @@ set(inference_test_sources
InferenceTestImage.cpp)
add_library_ex(inferenceTest STATIC ${inference_test_sources})
target_include_directories(inferenceTest PRIVATE ../src/armnnUtils)
+target_include_directories(inferenceTest PRIVATE ../src/backends)
if(BUILD_CAFFE_PARSER)
macro(CaffeParserTest testName sources)
add_executable_ex(${testName} ${sources})
target_include_directories(${testName} PRIVATE ../src/armnnUtils)
+ target_include_directories(${testName} PRIVATE ../src/backends)
set_target_properties(${testName} PROPERTIES COMPILE_FLAGS "${CAFFE_PARSER_TEST_ADDITIONAL_COMPILE_FLAGS}")
target_link_libraries(${testName} inferenceTest)
@@ -89,6 +91,7 @@ if(BUILD_TF_PARSER)
macro(TfParserTest testName sources)
add_executable_ex(${testName} ${sources})
target_include_directories(${testName} PRIVATE ../src/armnnUtils)
+ target_include_directories(${testName} PRIVATE ../src/backends)
target_link_libraries(${testName} inferenceTest)
target_link_libraries(${testName} armnnTfParser)
@@ -139,6 +142,7 @@ if (BUILD_TF_LITE_PARSER)
macro(TfLiteParserTest testName sources)
add_executable_ex(${testName} ${sources})
target_include_directories(${testName} PRIVATE ../src/armnnUtils)
+ target_include_directories(${testName} PRIVATE ../src/backends)
target_link_libraries(${testName} inferenceTest)
target_link_libraries(${testName} armnnTfLiteParser)
@@ -165,6 +169,7 @@ if (BUILD_ONNX_PARSER)
macro(OnnxParserTest testName sources)
add_executable_ex(${testName} ${sources})
target_include_directories(${testName} PRIVATE ../src/armnnUtils)
+ target_include_directories(${testName} PRIVATE ../src/backends)
target_link_libraries(${testName} inferenceTest)
target_link_libraries(${testName} armnnOnnxParser)
@@ -198,8 +203,9 @@ if (BUILD_CAFFE_PARSER OR BUILD_TF_PARSER OR BUILD_TF_LITE_PARSER OR BUILD_ONNX_
ExecuteNetwork/ExecuteNetwork.cpp)
add_executable_ex(ExecuteNetwork ${ExecuteNetwork_sources})
- target_include_directories(ExecuteNetwork PRIVATE ../src/armnnUtils)
target_include_directories(ExecuteNetwork PRIVATE ../src/armnn)
+ target_include_directories(ExecuteNetwork PRIVATE ../src/armnnUtils)
+ target_include_directories(ExecuteNetwork PRIVATE ../src/backends)
if (BUILD_CAFFE_PARSER)
target_link_libraries(ExecuteNetwork armnnCaffeParser)
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 7f1bcd38dc..4ebc9987a5 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -2,8 +2,7 @@
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
-#include "armnn/ArmNN.hpp"
-
+#include <armnn/ArmNN.hpp>
#include <armnn/TypesUtils.hpp>
#if defined(ARMNN_CAFFE_PARSER)
@@ -166,13 +165,6 @@ void RemoveDuplicateDevices(std::vector<armnn::BackendId>& computeDevices)
computeDevices.end());
}
-bool CheckDevicesAreValid(const std::vector<armnn::BackendId>& computeDevices)
-{
- return (!computeDevices.empty()
- && std::none_of(computeDevices.begin(), computeDevices.end(),
- [](armnn::BackendId c){ return c == armnn::Compute::Undefined; }));
-}
-
} // namespace
template<typename TParser, typename TDataType>
@@ -352,6 +344,10 @@ int RunCsvTest(const armnnUtils::CsvRow &csvRow,
size_t subgraphId = 0;
+ const std::string backendsMessage = std::string("The preferred order of devices to run layers on by default. ")
+ + std::string("Possible choices: ")
+ + armnn::BackendRegistryInstance().GetBackendIdsAsString();
+
po::options_description desc("Options");
try
{
@@ -361,7 +357,7 @@ int RunCsvTest(const armnnUtils::CsvRow &csvRow,
("model-path,m", po::value(&modelPath), "Path to model file, e.g. .caffemodel, .prototxt, .tflite,"
" .onnx")
("compute,c", po::value<std::vector<armnn::BackendId>>()->multitoken(),
- "The preferred order of devices to run layers on by default. Possible choices: CpuAcc, CpuRef, GpuAcc")
+ backendsMessage.c_str())
("input-name,i", po::value(&inputName), "Identifier of the input tensor in the network.")
("subgraph-number,n", po::value<size_t>(&subgraphId)->default_value(0), "Id of the subgraph to be "
"executed. Defaults to 0")
@@ -420,9 +416,11 @@ int RunCsvTest(const armnnUtils::CsvRow &csvRow,
RemoveDuplicateDevices(computeDevices);
// Check that the specified compute devices are valid.
- if (!CheckDevicesAreValid(computeDevices))
+ std::string invalidBackends;
+ if (!CheckRequestedBackendsAreValid(computeDevices, armnn::Optional<std::string&>(invalidBackends)))
{
- BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains an invalid compute";
+ BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
+ << invalidBackends;
return EXIT_FAILURE;
}
@@ -452,6 +450,9 @@ int main(int argc, const char* argv[])
size_t subgraphId = 0;
+ const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
+ + armnn::BackendRegistryInstance().GetBackendIdsAsString();
+
po::options_description desc("Options");
try
{
@@ -467,7 +468,7 @@ int main(int argc, const char* argv[])
("model-path,m", po::value(&modelPath), "Path to model file, e.g. .caffemodel, .prototxt,"
" .tflite, .onnx")
("compute,c", po::value<std::vector<std::string>>()->multitoken(),
- "The preferred order of devices to run layers on by default. Possible choices: CpuAcc, CpuRef, GpuAcc")
+ backendsMessage.c_str())
("input-name,i", po::value(&inputName), "Identifier of the input tensor in the network.")
("subgraph-number,x", po::value<size_t>(&subgraphId)->default_value(0), "Id of the subgraph to be executed."
"Defaults to 0")
@@ -594,9 +595,11 @@ int main(int argc, const char* argv[])
RemoveDuplicateDevices(computeDevices);
// Check that the specified compute devices are valid.
- if (!CheckDevicesAreValid(computeDevices))
+ std::string invalidBackends;
+ if (!CheckRequestedBackendsAreValid(computeDevices, armnn::Optional<std::string&>(invalidBackends)))
{
- BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains an invalid compute";
+ BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
+ << invalidBackends;
return EXIT_FAILURE;
}
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index 8645c9041a..8ef17d4df5 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -14,6 +14,8 @@
#include <armnnOnnxParser/IOnnxParser.hpp>
#endif
+#include <BackendRegistry.hpp>
+
#include <boost/exception/exception.hpp>
#include <boost/exception/diagnostic_information.hpp>
#include <boost/log/trivial.hpp>
@@ -22,11 +24,45 @@
#include <boost/filesystem.hpp>
#include <boost/lexical_cast.hpp>
+#include <fstream>
#include <map>
#include <string>
-#include <fstream>
#include <type_traits>
+namespace
+{
+
+inline bool CheckRequestedBackendsAreValid(const std::vector<armnn::BackendId>& backendIds,
+ armnn::Optional<std::string&> invalidBackendIds = armnn::EmptyOptional())
+{
+ if (backendIds.empty())
+ {
+ return false;
+ }
+
+ armnn::BackendIdSet validBackendIds = armnn::BackendRegistryInstance().GetBackendIds();
+
+ bool allValid = true;
+ for (const auto& backendId : backendIds)
+ {
+ if (std::find(validBackendIds.begin(), validBackendIds.end(), backendId) == validBackendIds.end())
+ {
+ allValid = false;
+ if (invalidBackendIds)
+ {
+ if (!invalidBackendIds.value().empty())
+ {
+ invalidBackendIds.value() += ", ";
+ }
+ invalidBackendIds.value() += backendId;
+ }
+ }
+ }
+ return allValid;
+}
+
+} // anonymous namespace
+
namespace InferenceModelInternal
{
// This needs to go when the armnnCaffeParser, armnnTfParser and armnnTfLiteParser
@@ -217,12 +253,14 @@ public:
std::vector<armnn::BackendId> defaultBackends = {armnn::Compute::CpuAcc, armnn::Compute::CpuRef};
+ const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
+ + armnn::BackendRegistryInstance().GetBackendIdsAsString();
+
desc.add_options()
("model-dir,m", po::value<std::string>(&options.m_ModelDir)->required(),
"Path to directory containing model files (.caffemodel/.prototxt/.tflite)")
("compute,c", po::value<std::vector<armnn::BackendId>>(&options.m_ComputeDevice)->default_value
- (defaultBackends),
- "Which device to run layers on by default. Possible choices: CpuAcc, CpuRef, GpuAcc")
+ (defaultBackends), backendsMessage.c_str())
("visualize-optimized-model,v",
po::value<bool>(&options.m_VisualizePostOptimizationModel)->default_value(false),
"Produce a dot file useful for visualizing the graph post optimization."
@@ -246,6 +284,12 @@ public:
m_Runtime = std::move(armnn::IRuntime::Create(options));
}
+ std::string invalidBackends;
+ if (!CheckRequestedBackendsAreValid(params.m_ComputeDevice, armnn::Optional<std::string&>(invalidBackends)))
+ {
+ throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
+ }
+
armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindingInfo,
m_OutputBindingInfo);
diff --git a/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp b/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp
index f9fdf8b3ea..f31e0c95a9 100644
--- a/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp
+++ b/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp
@@ -41,6 +41,9 @@ int main(int argc, char* argv[])
std::string modelDir;
std::string dataDir;
+ const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
+ + armnn::BackendRegistryInstance().GetBackendIdsAsString();
+
po::options_description desc("Options");
try
{
@@ -50,7 +53,7 @@ int main(int argc, char* argv[])
("model-dir,m", po::value<std::string>(&modelDir)->required(),
"Path to directory containing the Cifar10 model file")
("compute,c", po::value<std::vector<armnn::BackendId>>(&computeDevice)->default_value(defaultBackends),
- "Which device to run layers on by default. Possible choices: CpuAcc, CpuRef, GpuAcc")
+ backendsMessage.c_str())
("data-dir,d", po::value<std::string>(&dataDir)->required(),
"Path to directory containing the Cifar10 test data");
}
@@ -91,6 +94,15 @@ int main(int argc, char* argv[])
}
string modelPath = modelDir + "cifar10_full_iter_60000.caffemodel";
+ // Check if the requested backends are all valid
+ std::string invalidBackends;
+ if (!CheckRequestedBackendsAreValid(computeDevice, armnn::Optional<std::string&>(invalidBackends)))
+ {
+ BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
+ << invalidBackends;
+ return EXIT_FAILURE;
+ }
+
// Create runtime
armnn::IRuntime::CreationOptions options;
armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));