aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorLedion Daja <ledion.daja@arm.com>2022-11-21 09:23:50 +0100
committerLedion Daja <ledion.daja@arm.com>2022-11-30 14:07:22 +0100
commita7d025a89dff1d532add998392e2dba6ec7129a4 (patch)
tree06b76e6500e8e46d33b73356593a4491219b8107
parent3227fbddb304683e74af7a485455067bdea40fb8 (diff)
downloadethos-u-core-software-a7d025a89dff1d532add998392e2dba6ec7129a4.tar.gz
Extend inference_process library to use MicroMutableOpResolver
Added a compilation flag that allows the inference_process library to use a MicroMutableOpResolver as an alternative to AllOpsResolver. This makes it possible to register only the operators that are needed instead of the complete list of operators, thus reducing the memory footprint of the application. Change-Id: If1d6751b12e8aa301bb466e3ffae92406200eab4
-rw-r--r--applications/inference_process/CMakeLists.txt4
-rw-r--r--applications/inference_process/src/inference_process.cpp10
2 files changed, 14 insertions, 0 deletions
diff --git a/applications/inference_process/CMakeLists.txt b/applications/inference_process/CMakeLists.txt
index 47fae19..4142f11 100644
--- a/applications/inference_process/CMakeLists.txt
+++ b/applications/inference_process/CMakeLists.txt
@@ -37,3 +37,7 @@ if (TARGET ethosu_log)
endif()
target_sources(inference_process PRIVATE src/inference_process.cpp)
+
+if (DEFINED INFERENCE_PROCESS_OPS_RESOLVER)
+ target_compile_definitions(inference_process PRIVATE INFERENCE_PROCESS_OPS_RESOLVER=${INFERENCE_PROCESS_OPS_RESOLVER})
+endif() \ No newline at end of file
diff --git a/applications/inference_process/src/inference_process.cpp b/applications/inference_process/src/inference_process.cpp
index 71a3128..88bc8f4 100644
--- a/applications/inference_process/src/inference_process.cpp
+++ b/applications/inference_process/src/inference_process.cpp
@@ -16,7 +16,13 @@
* limitations under the License.
*/
+#ifndef INFERENCE_PROCESS_OPS_RESOLVER
#include "tensorflow/lite/micro/all_ops_resolver.h"
+#else
+#define _STRINGIFY(a) #a
+#define STRINGIFY(a) _STRINGIFY(a)
+#include STRINGIFY(INFERENCE_PROCESS_OPS_RESOLVER)
+#endif
#include "tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h"
#include "tensorflow/lite/micro/micro_interpreter.h"
#include "tensorflow/lite/micro/micro_time.h"
@@ -144,7 +150,11 @@ bool InferenceProcess::runJob(InferenceJob &job) {
}
// Create the TFL micro interpreter
+#ifndef INFERENCE_PROCESS_OPS_RESOLVER
tflite::AllOpsResolver resolver;
+#else
+ tflite::MicroMutableOpResolver<kNumberOperators> resolver = get_resolver();
+#endif
tflite::ArmProfiler profiler;
tflite::MicroInterpreter interpreter(model, resolver, tensorArena, tensorArenaSize, nullptr, &profiler);