From e6398cd54642a6a420b14003ad62309448dd724e Mon Sep 17 00:00:00 2001 From: Richard Burton Date: Wed, 13 Apr 2022 11:58:28 +0100 Subject: MLECO-3075: Add KWS use case API Removed some of the templates for feature calculation that we are unlikely to ever use. We might be able to refactor the feature caching and feature calculator code in the future to better integrate it with PreProcess API. Signed-off-by: Richard Burton Change-Id: Ic0c0c581c71e2553d41ff72cd1ed3b3efa64fa92 --- source/use_case/img_class/src/ImgClassProcessing.cc | 8 +++++++- source/use_case/img_class/src/UseCaseHandler.cc | 17 +++++++++-------- 2 files changed, 16 insertions(+), 9 deletions(-) (limited to 'source/use_case/img_class/src') diff --git a/source/use_case/img_class/src/ImgClassProcessing.cc b/source/use_case/img_class/src/ImgClassProcessing.cc index e33e3c1..6ba88ad 100644 --- a/source/use_case/img_class/src/ImgClassProcessing.cc +++ b/source/use_case/img_class/src/ImgClassProcessing.cc @@ -23,6 +23,9 @@ namespace app { ImgClassPreProcess::ImgClassPreProcess(Model* model) { + if (!model->IsInited()) { + printf_err("Model is not initialised!.\n"); + } this->m_model = model; } @@ -35,7 +38,7 @@ namespace app { auto input = static_cast(data); TfLiteTensor* inputTensor = this->m_model->GetInputTensor(0); - memcpy(inputTensor->data.data, input, inputSize); + std::memcpy(inputTensor->data.data, input, inputSize); debug("Input tensor populated \n"); if (this->m_model->IsDataSigned()) { @@ -52,6 +55,9 @@ namespace app { m_labels{labels}, m_results{results} { + if (!model->IsInited()) { + printf_err("Model is not initialised!.\n"); + } this->m_model = model; } diff --git a/source/use_case/img_class/src/UseCaseHandler.cc b/source/use_case/img_class/src/UseCaseHandler.cc index 98e2b59..11a1aa8 100644 --- a/source/use_case/img_class/src/UseCaseHandler.cc +++ b/source/use_case/img_class/src/UseCaseHandler.cc @@ -37,6 +37,12 @@ namespace app { { auto& profiler = ctx.Get("profiler"); auto& model = 
ctx.Get("model"); + /* If the request has a valid size, set the image index as it might not be set. */ + if (imgIndex < NUMBER_OF_FILES) { + if (!SetAppCtxIfmIdx(ctx, imgIndex, "imgIndex")) { + return false; + } + } auto initialImIdx = ctx.Get("imgIndex"); constexpr uint32_t dataPsnImgDownscaleFactor = 2; @@ -46,12 +52,7 @@ namespace app { constexpr uint32_t dataPsnTxtInfStartX = 150; constexpr uint32_t dataPsnTxtInfStartY = 40; - /* If the request has a valid size, set the image index. */ - if (imgIndex < NUMBER_OF_FILES) { - if (!SetAppCtxIfmIdx(ctx, imgIndex, "imgIndex")) { - return false; - } - } + if (!model.IsInited()) { printf_err("Model is not initialised! Terminating processing.\n"); return false; @@ -102,7 +103,7 @@ namespace app { /* Display message on the LCD - inference running. */ hal_lcd_display_text(str_inf.c_str(), str_inf.size(), - dataPsnTxtInfStartX, dataPsnTxtInfStartY, false); + dataPsnTxtInfStartX, dataPsnTxtInfStartY, false); /* Select the image to run inference with. */ info("Running inference on image %" PRIu32 " => %s\n", ctx.Get("imgIndex"), @@ -129,7 +130,7 @@ namespace app { /* Erase. */ str_inf = std::string(str_inf.size(), ' '); hal_lcd_display_text(str_inf.c_str(), str_inf.size(), - dataPsnTxtInfStartX, dataPsnTxtInfStartY, false); + dataPsnTxtInfStartX, dataPsnTxtInfStartY, false); /* Add results to context for access outside handler. */ ctx.Set>("results", results); -- cgit v1.2.1