aboutsummaryrefslogtreecommitdiff
path: root/tests/ExecuteNetwork
diff options
context:
space:
mode:
authorAron Virginas-Tar <Aron.Virginas-Tar@arm.com>2019-01-24 17:05:36 +0000
committerAron Virginas-Tar <Aron.Virginas-Tar@arm.com>2019-01-30 11:25:56 +0000
commit7cf0eaa26c1fb29ca9df97e4734ec7c1e10f81c4 (patch)
tree407b519ede76b235c54907fe80411970741e8a00 /tests/ExecuteNetwork
parent28d3d63cc0a33f8396b32fa8347c03912c065911 (diff)
downloadarmnn-7cf0eaa26c1fb29ca9df97e4734ec7c1e10f81c4.tar.gz
IVGCVSW-2564 Add support for multiple input and output bindings in InferenceModel
Change-Id: I64d724367d42dca4b768b6c6e42acda714985950
Diffstat (limited to 'tests/ExecuteNetwork')
-rw-r--r--tests/ExecuteNetwork/ExecuteNetwork.cpp37
1 file changed, 21 insertions, 16 deletions
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index dd769755b4..d783a0e2cf 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -137,13 +137,12 @@ std::vector<unsigned int> ParseArray(std::istream& stream)
[](const std::string& s) { return boost::numeric_cast<unsigned int>(std::stoi(s)); });
}
-void PrintArray(const std::vector<float>& v)
+void PrintOutputData(const std::string& outputLayerName, const std::vector<float>& data)
{
- for (size_t i = 0; i < v.size(); i++)
- {
- printf("%f ", v[i]);
- }
- printf("\n");
+ std::cout << outputLayerName << ": ";
+ std::copy(data.begin(), data.end(),
+ std::ostream_iterator<float>(std::cout, " "));
+ std::cout << std::endl;
}
void RemoveDuplicateDevices(std::vector<armnn::BackendId>& computeDevices)
@@ -179,8 +178,10 @@ int MainImpl(const char* modelPath,
const size_t subgraphId,
const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
{
+ using TContainer = std::vector<TDataType>;
+
// Loads input tensor.
- std::vector<TDataType> input;
+ TContainer inputDataContainer;
{
std::ifstream inputTensorFile(inputTensorDataFilePath);
if (!inputTensorFile.good())
@@ -188,7 +189,7 @@ int MainImpl(const char* modelPath,
BOOST_LOG_TRIVIAL(fatal) << "Failed to load input tensor data file from " << inputTensorDataFilePath;
return EXIT_FAILURE;
}
- input = ParseArray<TDataType>(inputTensorFile);
+ inputDataContainer = ParseArray<TDataType>(inputTensorFile);
}
try
@@ -198,19 +199,23 @@ int MainImpl(const char* modelPath,
params.m_ModelPath = modelPath;
params.m_IsModelBinary = isModelBinary;
params.m_ComputeDevice = computeDevice;
- params.m_InputBinding = inputName;
- params.m_InputTensorShape = inputTensorShape;
- params.m_OutputBinding = outputName;
+ params.m_InputBindings = { inputName };
+ params.m_InputShapes = { *inputTensorShape };
+ params.m_OutputBindings = { outputName };
params.m_EnableProfiling = enableProfiling;
params.m_SubgraphId = subgraphId;
InferenceModel<TParser, TDataType> model(params, runtime);
- // Executes the model.
- std::vector<TDataType> output(model.GetOutputSize());
- model.Run(input, output);
+ // Executes the model
+ const size_t numOutputs = params.m_OutputBindings.size();
+ std::vector<TContainer> outputDataContainers(numOutputs);
+ model.Run({ inputDataContainer }, outputDataContainers);
- // Prints the output tensor.
- PrintArray(output);
+ // Print output tensors
+ for (size_t i = 0; i < numOutputs; i++)
+ {
+ PrintOutputData(params.m_OutputBindings[i], outputDataContainers[i]);
+ }
}
catch (armnn::Exception const& e)
{