aboutsummaryrefslogtreecommitdiff
path: root/tests/InterfaceTests/OpaqueDelegateTest.cpp
diff options
context:
space:
mode:
authorColm Donelan <colm.donelan@arm.com>2023-10-02 17:01:37 +0100
committerColm Donelan <colm.donelan@arm.com>2023-10-19 07:27:02 +0000
commit0aef653469eebbdf88308b7fbc6bb78452d380d0 (patch)
tree868fb9ec011d5c4f05950e8bca5e54c92e645358 /tests/InterfaceTests/OpaqueDelegateTest.cpp
parentb41793a9f9afc43fb04a991ca819818fca8faab8 (diff)
downloadarmnn-0aef653469eebbdf88308b7fbc6bb78452d380d0.tar.gz
IVGCVSW-7731 Add test executables that verify released header files.
Create a CMake project with executables to exercise the external interfaces of Arm NN. Signed-off-by: Colm Donelan <colm.donelan@arm.com> Change-Id: I1e3a8ed726903aac5f52d78c55d2e1b1352c8362
Diffstat (limited to 'tests/InterfaceTests/OpaqueDelegateTest.cpp')
-rw-r--r--tests/InterfaceTests/OpaqueDelegateTest.cpp67
1 file changed, 67 insertions, 0 deletions
diff --git a/tests/InterfaceTests/OpaqueDelegateTest.cpp b/tests/InterfaceTests/OpaqueDelegateTest.cpp
new file mode 100644
index 0000000000..240a295393
--- /dev/null
+++ b/tests/InterfaceTests/OpaqueDelegateTest.cpp
@@ -0,0 +1,67 @@
+//
+// Copyright © 2023 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include <armnn_delegate.hpp>
+
+#include <tensorflow/lite/c/common.h>
+#include <tensorflow/lite/core/model.h>
+#include <tensorflow/lite/interpreter.h>
+#include <tensorflow/lite/kernels/register.h>
+
+#include <iostream>
+#include <memory>
+
+/// Smoke test for the released Arm NN opaque-delegate headers: loads a tiny
+/// Conv2d model, registers the Arm NN delegate with the TfLite runtime via the
+/// delegate-plugin registry, and runs a single inference.
+/// Returns 0 on success, -1 on any failure.
+int main()
+{
+    // Load the test model from the working directory.
+    std::unique_ptr<tflite::FlatBufferModel> model =
+        tflite::FlatBufferModel::BuildFromFile("./simple_conv2d_1_op.tflite");
+    if (!model)
+    {
+        std::cout << "Failed to load TfLite model from: ./simple_conv2d_1_op.tflite" << std::endl;
+        return -1;
+    }
+    std::unique_ptr<tflite::Interpreter> m_TfLiteInterpreter = std::make_unique<tflite::Interpreter>();
+    tflite::ops::builtin::BuiltinOpResolver resolver;
+    tflite::InterpreterBuilder builder(*model, resolver);
+    if (builder(&m_TfLiteInterpreter) != kTfLiteOk)
+    {
+        std::cout << "Error loading the model into the TfLiteInterpreter." << std::endl;
+        return -1;
+    }
+    // Use default settings until options have been enabled. An empty
+    // TFLiteSettings table is serialized and read back to obtain the
+    // root pointer the plugin registry expects.
+    flatbuffers::FlatBufferBuilder flatBufferBuilder;
+    tflite::TFLiteSettingsBuilder tfliteSettingsBuilder(flatBufferBuilder);
+    flatbuffers::Offset<tflite::TFLiteSettings> tfliteSettings = tfliteSettingsBuilder.Finish();
+    flatBufferBuilder.Finish(tfliteSettings);
+    const tflite::TFLiteSettings* settings =
+        flatbuffers::GetRoot<tflite::TFLiteSettings>(flatBufferBuilder.GetBufferPointer());
+
+    // Look up the Arm NN delegate plugin registered under "armnn_delegate".
+    // CreateByName returns nullptr when no plugin with that name is
+    // registered, so check before dereferencing.
+    std::unique_ptr<tflite::delegates::DelegatePluginInterface> delegatePlugIn =
+        tflite::delegates::DelegatePluginRegistry::CreateByName("armnn_delegate", *settings);
+    if (!delegatePlugIn)
+    {
+        std::cout << "Unable to find the armnn_delegate plugin in the delegate plugin registry." << std::endl;
+        return -1;
+    }
+
+    // Create Armnn Opaque Delegate from Armnn Delegate Plugin.
+    tflite::delegates::TfLiteDelegatePtr armnnDelegate = delegatePlugIn->Create();
+    if (!armnnDelegate)
+    {
+        std::cout << "The armnn_delegate plugin failed to create a delegate." << std::endl;
+        return -1;
+    }
+
+    // Add Delegate to the builder and rebuild the interpreter with it attached.
+    builder.AddDelegate(armnnDelegate.get());
+    if (builder(&m_TfLiteInterpreter) != kTfLiteOk)
+    {
+        std::cout << "Unable to add the Arm NN delegate to the TfLite runtime." << std::endl;
+        return -1;
+    }
+
+    if (m_TfLiteInterpreter->AllocateTensors() != kTfLiteOk)
+    {
+        std::cout << "Failed to allocate tensors in the TfLiteInterpreter." << std::endl;
+        return -1;
+    }
+
+    // Really should populate the tensors here, but it'll work without it.
+
+    if (m_TfLiteInterpreter->Invoke() != kTfLiteOk)
+    {
+        std::cout << "Inference failed." << std::endl;
+        return -1;
+    }
+    return 0;
+}