From 641c09157ebe25590e0ba378f70f05fc7466b8b1 Mon Sep 17 00:00:00 2001
From: Kristofer Jonsson
Date: Mon, 31 Aug 2020 11:34:14 +0200
Subject: Moving message- and inference processes to core software

Change-Id: I76e94440402d58848116d06ff3fd1ed2000ac505
---
 .../include/inference_process.hpp | 65 ++++++++++++++++++++++
 1 file changed, 65 insertions(+)
 create mode 100644 applications/inference_process/include/inference_process.hpp

(limited to 'applications/inference_process/include/inference_process.hpp')

diff --git a/applications/inference_process/include/inference_process.hpp b/applications/inference_process/include/inference_process.hpp
new file mode 100644
index 0000000..a5fef2c
--- /dev/null
+++ b/applications/inference_process/include/inference_process.hpp
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2019-2020 Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <queue>
+#include <stdlib.h>
+#include <string>
+
+namespace InferenceProcess {
+struct DataPtr {
+    void *data;
+    size_t size;
+
+    DataPtr(void *data = nullptr, size_t size = 0);
+};
+
+struct InferenceJob {
+    std::string name;
+    DataPtr networkModel;
+    DataPtr input;
+    DataPtr output;
+    DataPtr expectedOutput;
+    size_t numBytesToPrint;
+
+    InferenceJob();
+    InferenceJob(const std::string &name,
+                 const DataPtr &networkModel,
+                 const DataPtr &input,
+                 const DataPtr &output,
+                 const DataPtr &expectedOutput,
+                 size_t numBytesToPrint);
+};
+
+class InferenceProcess {
+public:
+    InferenceProcess();
+
+    bool push(const InferenceJob &job);
+    bool runJob(InferenceJob &job);
+    bool run(bool exitOnEmpty = true);
+
+private:
+    volatile uint32_t lock;
+    std::queue<InferenceJob> inferenceJobQueue;
+
+    void getLock();
+    void freeLock();
+};
+} // namespace InferenceProcess
--
cgit v1.2.1
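
For context, the header added by this patch only declares the API; the implementation lives in the accompanying inference_process sources in core software. The snippet below is a minimal usage sketch, not part of the patch, showing how an application might drive the declared interface under the following assumptions: DataPtr wraps a raw pointer plus size, InferenceJob describes one inference, and push()/run() queue and then drain jobs. The buffer names, sizes and the main() wrapper are purely illustrative, and the meaning of the boolean return values is defined by the implementation, which is not shown here.

#include <cstdint>

#include "inference_process.hpp"

// Illustrative buffers; a real application would point these at a network
// model, input/output tensors and (optionally) reference output data.
namespace {
uint8_t networkModelData[1024];
uint8_t inputData[256];
uint8_t outputData[256];
uint8_t expectedOutputData[256];
} // namespace

int main() {
    InferenceProcess::InferenceProcess inferenceProcess;

    // DataPtr is a plain pointer + size pair, as declared in the header.
    InferenceProcess::DataPtr networkModel(networkModelData, sizeof(networkModelData));
    InferenceProcess::DataPtr input(inputData, sizeof(inputData));
    InferenceProcess::DataPtr output(outputData, sizeof(outputData));
    InferenceProcess::DataPtr expectedOutput(expectedOutputData, sizeof(expectedOutputData));

    // Describe one job; the final argument maps to numBytesToPrint in the header.
    InferenceProcess::InferenceJob job("demo", networkModel, input, output, expectedOutput, 64);

    // Queue the job, then drain the queue. With the default exitOnEmpty = true,
    // run() is assumed to return once no queued jobs remain; the boolean return
    // values are defined by the core-software implementation, not by this header.
    inferenceProcess.push(job);
    inferenceProcess.run();

    return 0;
}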