aboutsummaryrefslogtreecommitdiff
path: root/tests/ExecuteNetwork/TfliteExecutor.hpp
diff options
context:
space:
mode:
authorFinn Williams <finn.williams@arm.com>2022-06-20 13:48:20 +0100
committerNikhil Raj <nikhil.raj@arm.com>2022-07-08 15:19:18 +0100
commit452c58080e9f8f577de87e0c07d0097aac97f3b8 (patch)
tree0a3bd2cc754cde1b3133a914597d607c52ce75ff /tests/ExecuteNetwork/TfliteExecutor.hpp
parentc7b6de86431e26766b60a69bcfcde985af61a028 (diff)
downloadarmnn-452c58080e9f8f577de87e0c07d0097aac97f3b8.tar.gz
IVGCVSW-6650 Refactor ExecuteNetwork
* Remove InferenceModel * Add automatic IO type, shape and name configuration * Deprecate various redundant options * Add internal output comparison Signed-off-by: Finn Williams <finn.williams@arm.com> Change-Id: I2eca248bc91e1655a99ed94990efb8059f541fa9
Diffstat (limited to 'tests/ExecuteNetwork/TfliteExecutor.hpp')
-rw-r--r--tests/ExecuteNetwork/TfliteExecutor.hpp35
1 files changed, 35 insertions, 0 deletions
diff --git a/tests/ExecuteNetwork/TfliteExecutor.hpp b/tests/ExecuteNetwork/TfliteExecutor.hpp
new file mode 100644
index 0000000000..009c79488e
--- /dev/null
+++ b/tests/ExecuteNetwork/TfliteExecutor.hpp
@@ -0,0 +1,35 @@
+//
+// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#pragma once
+
+#include "IExecutor.hpp"
+#include "NetworkExecutionUtils/NetworkExecutionUtils.hpp"
+#include "ExecuteNetworkProgramOptions.hpp"
+#include "armnn/utility/NumericCast.hpp"
+#include "armnn/utility/Timer.hpp"
+
+#include <armnn_delegate.hpp>
+#include <DelegateOptions.hpp>
+
+#include <tensorflow/lite/c/common.h>
+#include <tensorflow/lite/interpreter.h>
+#include <tensorflow/lite/kernels/register.h>
+
+// NOTE(review): using-directive at header scope pulls all of tflite into every
+// includer of this header — prefer qualified names (tflite::Interpreter) instead.
+using namespace tflite;
+// Executor that runs a model via the TfLite interpreter / Arm NN TfLite
+// delegate (see armnn_delegate.hpp include above). One of the IExecutor
+// implementations selected by ExecuteNetwork.
+class TfLiteExecutor : public IExecutor
+{
+public:
+    // NOTE(review): the parameter is named m_Params like the member below —
+    // presumably it is bound to the m_Params reference member; confirm in the
+    // .cpp. Because the member is a reference, the caller-owned
+    // ExecuteNetworkParams must outlive this executor.
+    TfLiteExecutor(const ExecuteNetworkParams& m_Params);
+
+    // Runs the network; returns opaque pointers — presumably to the
+    // interpreter's output tensor buffers (verify against the .cpp).
+    std::vector<const void *> Execute() override;
+    // Deliberate no-op for the TfLite path (empty inline body).
+    void PrintNetworkInfo() override{};
+    // Compares this executor's results against otherOutput — used for the
+    // internal output comparison described in the commit message.
+    void CompareAndPrintResult(std::vector<const void*> otherOutput) override;
+
+private:
+    std::unique_ptr<tflite::FlatBufferModel> m_Model;      // owned flatbuffer model
+    const ExecuteNetworkParams& m_Params;                  // non-owning; see ctor note
+    std::unique_ptr<Interpreter> m_TfLiteInterpreter;      // owned TfLite interpreter
+};
+