From 5ee069ef6c4e7002ddc19ade8a1fbca7ba027d68 Mon Sep 17 00:00:00 2001
From: Cathal Corbett
Date: Tue, 24 May 2022 17:25:57 +0100
Subject: IVGCVSW-6255 Investigate and fix running mobilebert with the
 TfLiteDelegate (CpuRef)

 * Fixed bug occurring in Ref Gather Workload.

Signed-off-by: Cathal Corbett
Change-Id: I3ee79f475fd9909bfbd4afb58f698439f26d6d65
---
 tests/ExecuteNetwork/ExecuteNetwork.cpp | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index f0a3d0821e..153fe5bcc7 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -110,16 +110,14 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params, const armnn::IRun
         std::cout << "Running on TfLite without ArmNN delegate\n";
     }
 
-    // Load (or generate) input data for inference
-    armnn::Optional<std::string> dataFile = params.m_GenerateTensorData
-                                            ? armnn::EmptyOptional()
-                                            : armnn::MakeOptional<std::string>(params.m_InputTensorDataFilePaths[0]);
-
     const size_t numInputs = params.m_InputNames.size();
-
     // Populate input tensor of interpreter
     for(unsigned int inputIndex = 0; inputIndex < numInputs; ++inputIndex)
    {
+        // Load (or generate) input data for inference
+        armnn::Optional<std::string> dataFile = params.m_GenerateTensorData ? armnn::EmptyOptional() :
+            armnn::MakeOptional<std::string>(params.m_InputTensorDataFilePaths[inputIndex]);
+
         int input = tfLiteInterpreter->inputs()[inputIndex];
         TfLiteIntArray* inputDims = tfLiteInterpreter->tensor(input)->dims;
 
-- 
cgit v1.2.1
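
The change above amounts to moving the input-file selection inside the per-input loop, so that input i is fed from m_InputTensorDataFilePaths[i] rather than every input reading the file at index 0. A minimal standalone sketch of that pattern follows; the names (LoadOrGenerate, inputFilePaths, generateData) are hypothetical and only the per-index selection mirrors the patch, using std::optional in place of armnn::Optional:

    // Sketch of the per-input file-selection pattern used by the fix above.
    // Hypothetical names; not the ExecuteNetwork code itself.
    #include <iostream>
    #include <optional>
    #include <string>
    #include <vector>

    std::optional<std::string> LoadOrGenerate(bool generateData,
                                              const std::vector<std::string>& inputFilePaths,
                                              unsigned int inputIndex)
    {
        // Before the fix the path was chosen once, outside the loop, always at index 0,
        // so every input tensor was fed from the same file. After the fix the path is
        // chosen per input, inside the loop.
        return generateData ? std::nullopt
                            : std::make_optional(inputFilePaths[inputIndex]);
    }

    int main()
    {
        const std::vector<std::string> inputFilePaths = { "input0.bin", "input1.bin", "input2.bin" };
        const bool generateData = false;

        for (unsigned int inputIndex = 0; inputIndex < inputFilePaths.size(); ++inputIndex)
        {
            auto dataFile = LoadOrGenerate(generateData, inputFilePaths, inputIndex);
            std::cout << "input " << inputIndex << " <- "
                      << (dataFile ? *dataFile : std::string("<generated>")) << "\n";
        }
        return 0;
    }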