From dd6d07b24bbf9023ebe8e8927be8aac3291d0f58 Mon Sep 17 00:00:00 2001
From: Kshitij Sisodia
Date: Tue, 3 May 2022 10:10:14 +0100
Subject: MLECO-3178: Fix for error reporting

TensorFlow Lite Micro's error reporting fix with some minor wiring
changes.

Change-Id: Ib011ab132c82db5809b75a7c4b983a3db2a808d6
---
 source/application/tensorflow-lite-micro/Model.cc           |  3 +--
 .../tensorflow-lite-micro/TensorFlowLiteMicro.cc             |  4 ++++
 source/application/tensorflow-lite-micro/include/Model.hpp  |  1 -
 .../tensorflow-lite-micro/include/TensorFlowLiteMicro.hpp    | 13 +++++++++++++
 4 files changed, 18 insertions(+), 3 deletions(-)

diff --git a/source/application/tensorflow-lite-micro/Model.cc b/source/application/tensorflow-lite-micro/Model.cc
index adcf8d7..22a1a4d 100644
--- a/source/application/tensorflow-lite-micro/Model.cc
+++ b/source/application/tensorflow-lite-micro/Model.cc
@@ -32,7 +32,7 @@ arm::app::Model::Model() :
     m_inited (false),
     m_type(kTfLiteNoType)
 {
-    this->m_pErrorReporter = &this->m_uErrorReporter;
+    this->m_pErrorReporter = tflite::GetMicroErrorReporter();
 }
 
 bool arm::app::Model::Init(tflite::MicroAllocator* allocator)
@@ -108,7 +108,6 @@ bool arm::app::Model::Init(tflite::MicroAllocator* allocator)
     TfLiteStatus allocate_status = this->m_pInterpreter->AllocateTensors();
     if (allocate_status != kTfLiteOk) {
-        this->m_pErrorReporter->Report("[ERROR] allocateTensors() failed");
         printf_err("tensor allocation failed!\n");
         delete this->m_pInterpreter;
         return false;
     }
diff --git a/source/application/tensorflow-lite-micro/TensorFlowLiteMicro.cc b/source/application/tensorflow-lite-micro/TensorFlowLiteMicro.cc
index e82e9b5..8738e5c 100644
--- a/source/application/tensorflow-lite-micro/TensorFlowLiteMicro.cc
+++ b/source/application/tensorflow-lite-micro/TensorFlowLiteMicro.cc
@@ -40,3 +40,7 @@ arm::app::QuantParams arm::app::GetTensorQuantParams(TfLiteTensor* tensor)
     return params;
 }
 
+extern "C" void DebugLog(const char* s)
+{
+    puts(s);
+}
diff --git a/source/application/tensorflow-lite-micro/include/Model.hpp b/source/application/tensorflow-lite-micro/include/Model.hpp
index b814da4..151b680 100644
--- a/source/application/tensorflow-lite-micro/include/Model.hpp
+++ b/source/application/tensorflow-lite-micro/include/Model.hpp
@@ -126,7 +126,6 @@ namespace app {
         size_t GetActivationBufferSize();
 
     private:
-        tflite::MicroErrorReporter m_uErrorReporter;         /* Error reporter object. */
         tflite::ErrorReporter* m_pErrorReporter = nullptr;   /* Pointer to the error reporter. */
        const tflite::Model* m_pModel = nullptr;              /* Tflite model pointer. */
         tflite::MicroInterpreter* m_pInterpreter = nullptr;  /* Tflite interpreter. */
diff --git a/source/application/tensorflow-lite-micro/include/TensorFlowLiteMicro.hpp b/source/application/tensorflow-lite-micro/include/TensorFlowLiteMicro.hpp
index 545ede1..f6639fd 100644
--- a/source/application/tensorflow-lite-micro/include/TensorFlowLiteMicro.hpp
+++ b/source/application/tensorflow-lite-micro/include/TensorFlowLiteMicro.hpp
@@ -60,13 +60,26 @@
 namespace arm {
 namespace app {
 
+    /** Struct for quantization parameters. */
     struct QuantParams {
         float scale = 1.0;
         int offset = 0;
     };
 
+    /**
+     * @brief       Gets the quantization parameters from a tensor
+     * @param[in]   tensor  pointer to the tensor.
+     * @return      QuantParams object.
+     */
     QuantParams GetTensorQuantParams(TfLiteTensor* tensor);
 
+    /**
+     * @brief       String logging functionality expected to be defined
+     *              by TensorFlow Lite Micro's error reporter.
+     * @param[in]   s   Pointer to the string.
+     */
+    extern "C" void DebugLog(const char* s);
+
 } /* namespace app */
 } /* namespace arm */
 
--
cgit v1.2.1
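
The sketch below illustrates the error-reporting path this patch wires up: arm::app::Model now holds the shared reporter returned by tflite::GetMicroErrorReporter(), and messages reported through it are emitted via the platform-supplied DebugLog(), which the patch routes to puts(). This is an illustrative sketch only, not part of the patch; it assumes TensorFlow Lite Micro's micro_error_reporter.h as shipped around this release, and the main() harness is purely hypothetical.

    /* Illustrative sketch: exercises the DebugLog() hook this patch defines.
     * Assumes TFLM's micro_error_reporter.h; main() is a hypothetical harness. */
    #include <cstdio>

    #include "tensorflow/lite/micro/micro_error_reporter.h"

    /* Symbol TFLM expects the platform to provide; the patch forwards it to puts(). */
    extern "C" void DebugLog(const char* s)
    {
        puts(s);
    }

    int main()
    {
        /* Shared reporter instance, as now used by the arm::app::Model constructor. */
        tflite::ErrorReporter* reporter = tflite::GetMicroErrorReporter();

        /* Formats the message and hands the resulting string to DebugLog() above. */
        TF_LITE_REPORT_ERROR(reporter, "tensor allocation failed: %d", -1);
        return 0;
    }

Keeping DebugLog() declared and defined with extern "C" matches the C linkage TensorFlow Lite Micro uses for this hook, so the library's internal log calls resolve against the application-provided definition at link time.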