From 0aef653469eebbdf88308b7fbc6bb78452d380d0 Mon Sep 17 00:00:00 2001
From: Colm Donelan
Date: Mon, 2 Oct 2023 17:01:37 +0100
Subject: IVGCVSW-7731 Add test executables that verify released header files.

Create a CMake project with executables to exercise the external
interfaces of Arm NN.

Signed-off-by: Colm Donelan
Change-Id: I1e3a8ed726903aac5f52d78c55d2e1b1352c8362
---
 tests/InterfaceTests/OpaqueDelegateTest.cpp | 67 +++++++++++++++++++++++++++++
 1 file changed, 67 insertions(+)
 create mode 100644 tests/InterfaceTests/OpaqueDelegateTest.cpp

(limited to 'tests/InterfaceTests/OpaqueDelegateTest.cpp')

diff --git a/tests/InterfaceTests/OpaqueDelegateTest.cpp b/tests/InterfaceTests/OpaqueDelegateTest.cpp
new file mode 100644
index 0000000000..240a295393
--- /dev/null
+++ b/tests/InterfaceTests/OpaqueDelegateTest.cpp
@@ -0,0 +1,67 @@
+//
+// Copyright © 2023 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include <armnn_delegate.hpp> // Assumed header: the original include targets were not recoverable.
+
+#include <tensorflow/lite/acceleration/configuration/delegate_registry.h> // Assumed TF Lite header paths.
+#include <tensorflow/lite/interpreter.h>
+#include <tensorflow/lite/kernels/register.h>
+#include <tensorflow/lite/model.h>
+
+int main()
+{
+    std::unique_ptr<tflite::FlatBufferModel> model;
+    model = tflite::FlatBufferModel::BuildFromFile("./simple_conv2d_1_op.tflite");
+    if (!model)
+    {
+        std::cout << "Failed to load TfLite model from: ./simple_conv2d_1_op.tflite" << std::endl;
+        return -1;
+    }
+    std::unique_ptr<tflite::Interpreter> m_TfLiteInterpreter;
+    m_TfLiteInterpreter = std::make_unique<tflite::Interpreter>();
+    tflite::ops::builtin::BuiltinOpResolver resolver;
+    tflite::InterpreterBuilder builder(*model, resolver);
+    if (builder(&m_TfLiteInterpreter) != kTfLiteOk)
+    {
+        std::cout << "Error loading the model into the TfLiteInterpreter." << std::endl;
+        return -1;
+    }
+    // Use default settings until options have been enabled.
+    flatbuffers::FlatBufferBuilder flatBufferBuilder;
+    tflite::TFLiteSettingsBuilder tfliteSettingsBuilder(flatBufferBuilder);
+    flatbuffers::Offset<tflite::TFLiteSettings> tfliteSettings = tfliteSettingsBuilder.Finish();
+    flatBufferBuilder.Finish(tfliteSettings);
+    const tflite::TFLiteSettings* settings =
+        flatbuffers::GetRoot<tflite::TFLiteSettings>(flatBufferBuilder.GetBufferPointer());
+
+    std::unique_ptr<tflite::delegates::DelegatePluginInterface> delegatePlugIn =
+        tflite::delegates::DelegatePluginRegistry::CreateByName("armnn_delegate", *settings);
+
+    // Create the Arm NN Opaque Delegate from the Arm NN Delegate Plugin.
+    tflite::delegates::TfLiteDelegatePtr armnnDelegate = delegatePlugIn->Create();
+
+    // Add the delegate to the builder and rebuild the interpreter.
+    builder.AddDelegate(armnnDelegate.get());
+    if (builder(&m_TfLiteInterpreter) != kTfLiteOk)
+    {
+        std::cout << "Unable to add the Arm NN delegate to the TfLite runtime." << std::endl;
+        return -1;
+    }
+
+    if (m_TfLiteInterpreter->AllocateTensors() != kTfLiteOk)
+    {
+        std::cout << "Failed to allocate tensors in the TfLiteInterpreter." << std::endl;
+        return -1;
+    }
+
+    // The input tensors should really be populated here, but inference will run without it.
+
+    int status = m_TfLiteInterpreter->Invoke();
+    if (status != kTfLiteOk)
+    {
+        std::cout << "Inference failed." << std::endl;
+        return -1;
+    }
+}
--
cgit v1.2.1
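
Note: the test above deliberately stops short of populating the interpreter's input tensors
(see the comment before Invoke()). For reference, a minimal sketch of how that could be done
with the standard tflite::Interpreter API is shown below. It assumes simple_conv2d_1_op.tflite
has a single float32 input and a single float32 output; the element type and the helper name
RunWithDummyInput are assumptions for illustration, not part of the patch. It also assumes the
plugin lookup succeeded: DelegatePluginRegistry::CreateByName returns a null pointer if no
plugin named "armnn_delegate" has been registered, so a null check before calling Create()
would be a worthwhile addition to the test itself.

// Sketch only: fill input tensor 0 with ones, run inference, and print output tensor 0.
// Assumes both tensors are float32; a robust test would check TfLiteTensor::type and dims.
#include <algorithm>
#include <cstddef>
#include <iostream>

#include <tensorflow/lite/interpreter.h>

void RunWithDummyInput(tflite::Interpreter& interpreter) // Hypothetical helper name
{
    // AllocateTensors() must already have been called on this interpreter.
    TfLiteTensor* inputTensor = interpreter.tensor(interpreter.inputs()[0]);
    const std::size_t inputElements = inputTensor->bytes / sizeof(float);
    float* inputData = interpreter.typed_input_tensor<float>(0);
    std::fill(inputData, inputData + inputElements, 1.0f);

    if (interpreter.Invoke() != kTfLiteOk)
    {
        std::cout << "Inference failed." << std::endl;
        return;
    }

    const TfLiteTensor* outputTensor = interpreter.tensor(interpreter.outputs()[0]);
    const std::size_t outputElements = outputTensor->bytes / sizeof(float);
    const float* outputData = interpreter.typed_output_tensor<float>(0);
    for (std::size_t i = 0; i < outputElements; ++i)
    {
        std::cout << outputData[i] << " ";
    }
    std::cout << std::endl;
}

In the test's main() this helper would be called after AllocateTensors(), taking the place of
the existing Invoke() call, so the inference runs on defined input data and the output can be
inspected.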