From 80fbcd5f4d7b362360963af1df0121aa6b561576 Mon Sep 17 00:00:00 2001
From: Matthew Sloyan
Date: Thu, 7 Jan 2021 13:28:47 +0000
Subject: IVGCVSW-5483 'Implement Loading and Saving to File'

* Implemented Serialization and Deserialization of CLContext.
* Fixed flatbuffers android-nn-driver dependency.

!android-nn-driver:4772

Signed-off-by: Matthew Sloyan
Signed-off-by: Sadik Armagan
Change-Id: If806f050535ffaa70922ba0f1ffe7bb10f902329
---
 src/backends/cl/test/ClContextSerializerTests.cpp | 138 ++++++++++++++++++++++
 1 file changed, 138 insertions(+)
 create mode 100644 src/backends/cl/test/ClContextSerializerTests.cpp

(limited to 'src/backends/cl/test/ClContextSerializerTests.cpp')

diff --git a/src/backends/cl/test/ClContextSerializerTests.cpp b/src/backends/cl/test/ClContextSerializerTests.cpp
new file mode 100644
index 0000000000..1fc0fb9205
--- /dev/null
+++ b/src/backends/cl/test/ClContextSerializerTests.cpp
@@ -0,0 +1,138 @@
+//
+// Copyright © 2020 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+// NOTE(review): the four #include targets below were stripped by HTML extraction
+// (angle-bracket content lost); reconstructed from the names this file uses
+// (armnnUtils::Filesystem, ClContextControlFixture, Boost.Test, std::ifstream) — confirm against upstream.
+#include <armnnUtils/Filesystem.hpp>
+
+#include <cl/test/ClContextControlFixture.hpp>
+
+#include <boost/test/unit_test.hpp>
+
+#include <fstream>
+
+namespace
+{
+
+// Builds a minimal network (input -> softmax -> output) with QAsymmU8 tensors,
+// used to exercise CL context serialization.
+armnn::INetworkPtr CreateNetwork()
+{
+    // Builds up the structure of the network.
+    armnn::INetworkPtr net(armnn::INetwork::Create());
+
+    armnn::IConnectableLayer* input   = net->AddInputLayer(0, "input");
+    armnn::IConnectableLayer* softmax = net->AddSoftmaxLayer(armnn::SoftmaxDescriptor(), "softmax");
+    armnn::IConnectableLayer* output  = net->AddOutputLayer(0, "output");
+
+    input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
+    softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+
+    // Sets the input and output tensors
+    armnn::TensorInfo inputTensorInfo(armnn::TensorShape({1, 5}), armnn::DataType::QAsymmU8, 10000.0f, 1);
+    input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
+
+    armnn::TensorInfo outputTensorInfo(armnn::TensorShape({1, 5}), armnn::DataType::QAsymmU8, 1.0f/255.0f, 0);
+    softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+
+    return net;
+}
+
+// Runs one inference on the loaded network and writes the result into outputData.
+// Element type is uint8_t to match the QAsymmU8 tensors created above.
+void RunInference(armnn::NetworkId& netId, armnn::IRuntimePtr& runtime, std::vector<uint8_t>& outputData)
+{
+    // Creates structures for input & output.
+    std::vector<uint8_t> inputData
+    {
+        1, 10, 3, 200, 5 // Some inputs - one of which is sufficiently larger than the others to saturate softmax.
+    };
+
+    armnn::InputTensors inputTensors
+    {
+        {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
+    };
+
+    armnn::OutputTensors outputTensors
+    {
+        {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
+    };
+
+    // Run inference.
+    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
+}
+
+// Reads the whole file as raw bytes; used to check the cached context is non-empty.
+std::vector<char> ReadBinaryFile(const std::string& binaryFileName)
+{
+    std::ifstream input(binaryFileName, std::ios::binary);
+    return std::vector<char>(std::istreambuf_iterator<char>(input), {});
+}
+
+} // anonymous namespace
+
+BOOST_FIXTURE_TEST_SUITE(ClContextSerializer, ClContextControlFixture)
+
+BOOST_AUTO_TEST_CASE(ClContextSerializerTest)
+{
+    // Get tmp directory and create blank file.
+    fs::path filePath = armnnUtils::Filesystem::NamedTempFile("Armnn-CachedNetworkFileTest-TempFile.bin");
+    std::string const filePathString{filePath.string()};
+    std::ofstream file { filePathString };
+
+    // Create runtime in which test will run
+    armnn::IRuntime::CreationOptions options;
+    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
+
+    std::vector<armnn::BackendId> backends = {armnn::Compute::GpuAcc};
+
+    // Create two networks.
+    // net1 will serialize and save context to file.
+    // net2 will deserialize context saved from net1 and load.
+    armnn::INetworkPtr net1 = CreateNetwork();
+    armnn::INetworkPtr net2 = CreateNetwork();
+
+    // Add specific optimizerOptions to each network.
+    armnn::OptimizerOptions optimizerOptions1;
+    armnn::OptimizerOptions optimizerOptions2;
+    armnn::BackendOptions modelOptions1("GpuAcc",
+                                        {{"SaveCachedNetwork", true}, {"CachedNetworkFilePath", filePathString}});
+    armnn::BackendOptions modelOptions2("GpuAcc",
+                                        {{"SaveCachedNetwork", false}, {"CachedNetworkFilePath", filePathString}});
+    optimizerOptions1.m_ModelOptions.push_back(modelOptions1);
+    optimizerOptions2.m_ModelOptions.push_back(modelOptions2);
+
+    armnn::IOptimizedNetworkPtr optNet1 = armnn::Optimize(
+            *net1, backends, runtime->GetDeviceSpec(), optimizerOptions1);
+    armnn::IOptimizedNetworkPtr optNet2 = armnn::Optimize(
+            *net2, backends, runtime->GetDeviceSpec(), optimizerOptions2);
+    BOOST_CHECK(optNet1);
+    BOOST_CHECK(optNet2);
+
+    // Cached file should be empty until net1 is loaded into runtime.
+    BOOST_TEST(fs::is_empty(filePathString));
+
+    // Load net1 into the runtime.
+    armnn::NetworkId netId1;
+    BOOST_TEST(runtime->LoadNetwork(netId1, std::move(optNet1)) == armnn::Status::Success);
+
+    // File should now exist and not be empty. It has been serialized.
+    BOOST_TEST(fs::exists(filePathString));
+    std::vector<char> dataSerialized = ReadBinaryFile(filePathString);
+    BOOST_TEST(dataSerialized.size() != 0);
+
+    // Load net2 into the runtime using file and deserialize.
+    armnn::NetworkId netId2;
+    BOOST_TEST(runtime->LoadNetwork(netId2, std::move(optNet2)) == armnn::Status::Success);
+
+    // Run inference and get output data.
+    std::vector<uint8_t> outputData1(5);
+    RunInference(netId1, runtime, outputData1);
+
+    std::vector<uint8_t> outputData2(5);
+    RunInference(netId2, runtime, outputData2);
+
+    // Compare outputs from both networks.
+    BOOST_CHECK_EQUAL_COLLECTIONS(outputData1.begin(), outputData1.end(),
+                                  outputData2.begin(), outputData2.end());
+
+    // Remove temp file created.
+    fs::remove(filePath);
+}
+
+BOOST_AUTO_TEST_SUITE_END()
-- 
cgit v1.2.1