path: root/applications/message_handler/message_handler.cpp
author    Davide Grohmann <davide.grohmann@arm.com>  2022-03-24 15:38:27 +0100
committer Davide Grohmann <davide.grohmann@arm.com>  2022-05-09 09:36:57 +0200
commit    160001cdc876e2e8b9fdc4453dc72081c809ed2d (patch)
tree      583506efd1571715abe66c1a52dc8d9517df7ec9 /applications/message_handler/message_handler.cpp
parent    8b53aad76ea95dc1f4c8ce64b6f8dc14f727b46f (diff)
download  ethos-u-core-platform-160001cdc876e2e8b9fdc4453dc72081c809ed2d.tar.gz
Add support for cancelling enqueued inferences
If an enqueued inference is cancelled, it is simply removed from the queue. Both a cancel inference response with status done and an inference response with status cancelled are sent back. Also override the new operators to call into the FreeRTOS allocator instead of malloc/free.

Change-Id: I243e678aa6b996084c9b9be1d1b00ffcecc75bc9
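The new-operator overrides mentioned in the message land in another file of this change; purely as an illustration, a minimal sketch of such an override, assuming the FreeRTOS heap API (pvPortMalloc/vPortFree) and a target built without C++ exceptions, might look like this:

    #include "FreeRTOS.h"

    #include <cstddef>

    // Route all global dynamic allocation to the FreeRTOS heap instead of the
    // C library malloc/free. Allocation-failure handling is omitted in this sketch.
    void *operator new(std::size_t size) {
        return pvPortMalloc(size);
    }

    void *operator new[](std::size_t size) {
        return pvPortMalloc(size);
    }

    void operator delete(void *ptr) noexcept {
        vPortFree(ptr);
    }

    void operator delete[](void *ptr) noexcept {
        vPortFree(ptr);
    }

Routing global new/delete through pvPortMalloc/vPortFree keeps every dynamic allocation in the heap that FreeRTOS already manages and accounts for, instead of splitting allocations between two heaps.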
Diffstat (limited to 'applications/message_handler/message_handler.cpp')
-rw-r--r--  applications/message_handler/message_handler.cpp | 39
1 file changed, 23 insertions(+), 16 deletions(-)
diff --git a/applications/message_handler/message_handler.cpp b/applications/message_handler/message_handler.cpp
index f06d056..f109dc8 100644
--- a/applications/message_handler/message_handler.cpp
+++ b/applications/message_handler/message_handler.cpp
@@ -138,12 +138,13 @@ bool getNetwork(const ethosu_core_network_buffer &buffer, void *&data, size_t &s
}; // namespace
-IncomingMessageHandler::IncomingMessageHandler(EthosU::ethosu_core_queue &_inputMessageQueue,
- EthosU::ethosu_core_queue &_outputMessageQueue,
- Mailbox::Mailbox &_mailbox,
- QueueHandle_t _inferenceInputQueue,
- QueueHandle_t _inferenceOutputQueue,
- SemaphoreHandle_t _messageNotify) :
+IncomingMessageHandler::IncomingMessageHandler(
+ EthosU::ethosu_core_queue &_inputMessageQueue,
+ EthosU::ethosu_core_queue &_outputMessageQueue,
+ Mailbox::Mailbox &_mailbox,
+ std::shared_ptr<Queue<EthosU::ethosu_core_inference_req>> _inferenceInputQueue,
+ QueueHandle_t _inferenceOutputQueue,
+ SemaphoreHandle_t _messageNotify) :
inputMessageQueue(_inputMessageQueue),
outputMessageQueue(_outputMessageQueue), mailbox(_mailbox), inferenceInputQueue(_inferenceInputQueue),
inferenceOutputQueue(_inferenceOutputQueue), messageNotify(_messageNotify) {
@@ -166,7 +167,7 @@ void IncomingMessageHandler::handleIrq(void *userArg) {
return;
}
IncomingMessageHandler *_this = reinterpret_cast<IncomingMessageHandler *>(userArg);
- xSemaphoreGive(_this->messageNotify);
+ xSemaphoreGiveFromISR(_this->messageNotify, nullptr);
}
void IncomingMessageHandler::sendErrorAndResetQueue(ethosu_core_msg_err_type type, const char *message) {
@@ -287,7 +288,7 @@ bool IncomingMessageHandler::handleMessage() {
}
printf("]\n");
- if (pdTRUE != xQueueSend(inferenceInputQueue, &req, 0)) {
+ if (!inferenceInputQueue->push(req)) {
printf("Msg: Inference queue full. Rejecting inference user_arg=0x%" PRIx64 "\n", req.user_arg);
sendFailedInferenceRsp(req.user_arg, ETHOSU_CORE_STATUS_REJECTED);
}
@@ -303,7 +304,15 @@ bool IncomingMessageHandler::handleMessage() {
req.user_arg,
req.inference_handle);
- sendCancelInferenceRsp(req.user_arg, ETHOSU_CORE_STATUS_ERROR);
+ bool found =
+ inferenceInputQueue->erase([req](auto &inf_req) { return inf_req.user_arg == req.inference_handle; });
+
+ // NOTE: send an inference response with status ABORTED if the inference has been dropped from the queue
+ if (found) {
+ sendFailedInferenceRsp(req.inference_handle, ETHOSU_CORE_STATUS_ABORTED);
+ }
+
+ sendCancelInferenceRsp(req.user_arg, found ? ETHOSU_CORE_STATUS_OK : ETHOSU_CORE_STATUS_ERROR);
break;
}
case ETHOSU_CORE_MSG_NETWORK_INFO_REQ: {
@@ -450,22 +459,20 @@ void IncomingMessageHandler::readCapabilties(ethosu_core_msg_capabilities_rsp &r
* InferenceHandler
****************************************************************************/
-InferenceHandler::InferenceHandler(uint8_t *_tensorArena,
- size_t _arenaSize,
- QueueHandle_t _inferenceInputQueue,
+InferenceHandler::InferenceHandler(uint8_t *tensorArena,
+ size_t arenaSize,
+ std::shared_ptr<Queue<EthosU::ethosu_core_inference_req>> _inferenceInputQueue,
QueueHandle_t _inferenceOutputQueue,
SemaphoreHandle_t _messageNotify) :
inferenceInputQueue(_inferenceInputQueue),
- inferenceOutputQueue(_inferenceOutputQueue), messageNotify(_messageNotify), inference(_tensorArena, _arenaSize) {}
+ inferenceOutputQueue(_inferenceOutputQueue), messageNotify(_messageNotify), inference(tensorArena, arenaSize) {}
void InferenceHandler::run() {
ethosu_core_inference_req req;
ethosu_core_inference_rsp rsp;
while (true) {
- if (pdTRUE != xQueueReceive(inferenceInputQueue, &req, portMAX_DELAY)) {
- continue;
- }
+ inferenceInputQueue->pop(req);
runInference(req, rsp);
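The push()/pop()/erase() calls above go through a Queue<T> wrapper that replaces the raw FreeRTOS QueueHandle_t; the wrapper itself is defined elsewhere in this change, so the following is only a hypothetical sketch of such a mutex-protected queue with a predicate-based erase, built on standard FreeRTOS semaphores (all names and details here are assumptions, not the commit's actual implementation):

    #include "FreeRTOS.h"
    #include "semphr.h"

    #include <cstddef>
    #include <list>

    template <typename T>
    class Queue {
    public:
        explicit Queue(std::size_t _capacity) : capacity(_capacity) {
            mutex = xSemaphoreCreateMutex();
            // Counting semaphore tracks how many elements are available to pop.
            available = xSemaphoreCreateCounting(capacity, 0);
        }

        // Non-blocking push; returns false when full, which maps to the
        // "Inference queue full" rejection path in handleMessage().
        bool push(const T &value) {
            xSemaphoreTake(mutex, portMAX_DELAY);
            if (elements.size() >= capacity) {
                xSemaphoreGive(mutex);
                return false;
            }
            elements.push_back(value);
            xSemaphoreGive(mutex);
            xSemaphoreGive(available);
            return true;
        }

        // Blocking pop, standing in for the removed xQueueReceive() call.
        void pop(T &value) {
            xSemaphoreTake(available, portMAX_DELAY);
            xSemaphoreTake(mutex, portMAX_DELAY);
            value = elements.front();
            elements.pop_front();
            xSemaphoreGive(mutex);
        }

        // Removes the first element matching the predicate; returns whether one was found.
        template <typename Pred>
        bool erase(Pred pred) {
            bool found = false;
            xSemaphoreTake(mutex, portMAX_DELAY);
            for (auto it = elements.begin(); it != elements.end(); ++it) {
                if (pred(*it)) {
                    elements.erase(it);
                    found = true;
                    break;
                }
            }
            xSemaphoreGive(mutex);
            if (found) {
                // Best-effort consume of one count so pop() does not wake for the removed element.
                xSemaphoreTake(available, 0);
            }
            return found;
        }

    private:
        std::size_t capacity;
        std::list<T> elements;
        SemaphoreHandle_t mutex;
        SemaphoreHandle_t available;
    };

A predicate-based erase is the piece a plain FreeRTOS queue cannot offer, and it is what lets the cancel request drop a still-enqueued inference by matching its handle, as done in the cancel hunk above. The xSemaphoreGiveFromISR change in handleIrq is independent of the queue rework: FreeRTOS requires the FromISR variants when signalling a semaphore from interrupt context.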