diff options
author | Narumol Prangnawarat <narumol.prangnawarat@arm.com> | 2023-05-05 16:39:05 +0100 |
---|---|---|
committer | Francis Murtagh <francis.murtagh@arm.com> | 2023-05-08 13:19:15 +0000 |
commit | 46e574e8195ae2e8085086457a642210d9d3b8cd (patch) | |
tree | b3a7b2bdc86b222509f5110c177a82646a58797b /tests/ExecuteNetwork/TfliteExecutor.cpp | |
parent | 97a3aefff63ae081ae62aa5bac17d6e9c401937e (diff) | |
download | armnn-46e574e8195ae2e8085086457a642210d9d3b8cd.tar.gz |
IVGCVSW-7626 Add Execute Network for Opaque Delegate
Signed-off-by: Narumol Prangnawarat <narumol.prangnawarat@arm.com>
Change-Id: Ibdded86713368ecfdf31c4118dfe8a3404d1e3b8
Diffstat (limited to 'tests/ExecuteNetwork/TfliteExecutor.cpp')
-rw-r--r-- | tests/ExecuteNetwork/TfliteExecutor.cpp | 35 |
1 file changed, 34 insertions(+), 1 deletion(-)
diff --git a/tests/ExecuteNetwork/TfliteExecutor.cpp b/tests/ExecuteNetwork/TfliteExecutor.cpp
index 87731c2f83..04f6ddb72a 100644
--- a/tests/ExecuteNetwork/TfliteExecutor.cpp
+++ b/tests/ExecuteNetwork/TfliteExecutor.cpp
@@ -3,6 +3,11 @@
 // SPDX-License-Identifier: MIT
 //
 
+#if defined(ARMNN_TFLITE_OPAQUE_DELEGATE)
+#include <../delegate/opaque/include/armnn_delegate.hpp>
+#endif
+
+#include <tensorflow/lite/core/c/c_api.h>
 #include "TfliteExecutor.hpp"
 
 #include "tensorflow/lite/kernels/kernel_util.h"
@@ -26,8 +31,33 @@ TfLiteExecutor::TfLiteExecutor(const ExecuteNetworkParams& params, armnn::IRunti
     {
         LogAndThrow("Failed to allocate tensors in the TfLiteInterpreter.");
     }
-    if (m_Params.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteDelegate)
+
+    if (m_Params.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteOpaqueDelegate)
+    {
+#if defined(ARMNN_TFLITE_OPAQUE_DELEGATE)
+        // Use default settings until options have been enabled
+        flatbuffers::FlatBufferBuilder flatBufferBuilder;
+        TFLiteSettingsBuilder tfliteSettingsBuilder(flatBufferBuilder);
+        flatbuffers::Offset<TFLiteSettings> tfliteSettings = tfliteSettingsBuilder.Finish();
+        flatBufferBuilder.Finish(tfliteSettings);
+        const TFLiteSettings* settings =
+            flatbuffers::GetRoot<TFLiteSettings>(flatBufferBuilder.GetBufferPointer());
+
+        std::unique_ptr<delegates::DelegatePluginInterface> delegatePlugIn =
+            delegates::DelegatePluginRegistry::CreateByName("armnn_delegate", *settings);
+
+        // Create Armnn Opaque Delegate from Armnn Delegate Plugin
+        delegates::TfLiteDelegatePtr armnnDelegate = delegatePlugIn->Create();
+
+        // Add Delegate to the builder
+        builder.AddDelegate(armnnDelegate.get());
+#else
+        LogAndThrow("Not built with Arm NN Tensorflow-Lite opaque delegate support.");
+#endif
+    }
+    else if (m_Params.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteDelegate)
     {
+#if defined(ARMNN_TFLITE_DELEGATE)
         // Create the Armnn Delegate
         // Populate a DelegateOptions from the ExecuteNetworkParams.
         armnnDelegate::DelegateOptions delegateOptions = m_Params.ToDelegateOptions();
@@ -40,6 +70,9 @@ TfLiteExecutor::TfLiteExecutor(const ExecuteNetworkParams& params, armnn::IRunti
     {
         LogAndThrow("Could not register ArmNN TfLite Delegate to TfLiteInterpreter.");
     }
+#else
+        LogAndThrow("Not built with Arm NN Tensorflow-Lite delegate support.");
+#endif
     }
     else
     {