about summary refs log tree commit diff
path: root/applications/message_handler_openamp/inference_runner.hpp
diff options
context:
space:
mode:
Diffstat (limited to 'applications/message_handler_openamp/inference_runner.hpp')
-rw-r--r--applications/message_handler_openamp/inference_runner.hpp56
1 files changed, 56 insertions, 0 deletions
diff --git a/applications/message_handler_openamp/inference_runner.hpp b/applications/message_handler_openamp/inference_runner.hpp
new file mode 100644
index 0000000..c9461a0
--- /dev/null
+++ b/applications/message_handler_openamp/inference_runner.hpp
@@ -0,0 +1,56 @@
+/*
+ * SPDX-FileCopyrightText: Copyright 2022-2023 Arm Limited and/or its affiliates <open-source-office@arm.com>
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+/*****************************************************************************
+ * Includes
+ *****************************************************************************/
+
+#include "message_handler.hpp"
+
+#include <inference_process.hpp>
+
+/*****************************************************************************
+ * InferenceRunner
+ *****************************************************************************/
+
+class InferenceRunner { // Services Ethos-U inference requests on a dedicated FreeRTOS task
+public:
+    InferenceRunner(uint8_t *tensorArena,                       // arena backing the inference process; caller retains ownership — confirm lifetime in .cpp
+                    size_t arenaSize,                           // size of tensorArena in bytes
+                    MessageHandler::InferenceQueue &inferenceQueue,  // incoming inference requests; not owned, must outlive this object
+                    MessageHandler::ResponseQueue &responseQueue);   // outgoing responses; not owned, must outlive this object
+    ~InferenceRunner(); // NOTE(review): presumably deletes the FreeRTOS task created by the ctor — confirm in .cpp
+
+private:
+    static void inferenceTask(void *param); // FreeRTOS task entry point; 'param' is presumably the InferenceRunner instance — confirm
+
+    void handleInferenceRequest(const uint32_t src,                   // src/msgId identify the requester for the matching response — confirm semantics
+                                const uint64_t msgId,
+                                const EthosU::ethosu_core_msg_inference_req &request);
+    InferenceProcess::InferenceJob makeInferenceJob(const EthosU::ethosu_core_msg_inference_req &request, // builds a job from the request; 'response' receives result fields — confirm
+                                                    EthosU::ethosu_core_msg_inference_rsp &response);
+
+    MessageHandler::InferenceQueue &inferenceQueue; // reference to caller-owned queue (see ctor)
+    MessageHandler::ResponseQueue &responseQueue;   // reference to caller-owned queue (see ctor)
+    InferenceProcess::InferenceProcess inference;   // executes inference jobs using the tensor arena
+
+    // FreeRTOS
+    TaskHandle_t taskHandle; // handle of the task running inferenceTask — presumably created in the ctor; confirm
+};