From a7d025a89dff1d532add998392e2dba6ec7129a4 Mon Sep 17 00:00:00 2001 From: Ledion Daja Date: Mon, 21 Nov 2022 09:23:50 +0100 Subject: Extend inference_process library to use MicroMutableOpResolver Added compilation flag to allow the inference_process library to use a MicroMutableOpResolver as an alternative to AllOpsResolver. This allows using only the needed operators instead of the complete list of operators, thus reducing the memory footprint of the application. Change-Id: If1d6751b12e8aa301bb466e3ffae92406200eab4 --- applications/inference_process/CMakeLists.txt | 4 ++++ applications/inference_process/src/inference_process.cpp | 10 ++++++++++ 2 files changed, 14 insertions(+) diff --git a/applications/inference_process/CMakeLists.txt b/applications/inference_process/CMakeLists.txt index 47fae19..4142f11 100644 --- a/applications/inference_process/CMakeLists.txt +++ b/applications/inference_process/CMakeLists.txt @@ -37,3 +37,7 @@ if (TARGET ethosu_log) endif() target_sources(inference_process PRIVATE src/inference_process.cpp) + +if (DEFINED INFERENCE_PROCESS_OPS_RESOLVER) + target_compile_definitions(inference_process PRIVATE INFERENCE_PROCESS_OPS_RESOLVER=${INFERENCE_PROCESS_OPS_RESOLVER}) +endif() \ No newline at end of file diff --git a/applications/inference_process/src/inference_process.cpp b/applications/inference_process/src/inference_process.cpp index 71a3128..88bc8f4 100644 --- a/applications/inference_process/src/inference_process.cpp +++ b/applications/inference_process/src/inference_process.cpp @@ -16,7 +16,13 @@ * limitations under the License. 
*/ +#ifndef INFERENCE_PROCESS_OPS_RESOLVER #include "tensorflow/lite/micro/all_ops_resolver.h" +#else +#define _STRINGIFY(a) #a +#define STRINGIFY(a) _STRINGIFY(a) +#include STRINGIFY(INFERENCE_PROCESS_OPS_RESOLVER) +#endif #include "tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h" #include "tensorflow/lite/micro/micro_interpreter.h" #include "tensorflow/lite/micro/micro_time.h" @@ -144,7 +150,11 @@ bool InferenceProcess::runJob(InferenceJob &job) { } // Create the TFL micro interpreter +#ifndef INFERENCE_PROCESS_OPS_RESOLVER tflite::AllOpsResolver resolver; +#else + tflite::MicroMutableOpResolver resolver = get_resolver(); +#endif tflite::ArmProfiler profiler; tflite::MicroInterpreter interpreter(model, resolver, tensorArena, tensorArenaSize, nullptr, &profiler); -- cgit v1.2.1