diff options
author | Kristofer Jonsson <kristofer.jonsson@arm.com> | 2020-08-31 11:34:14 +0200 |
---|---|---|
committer | Kristofer Jonsson <kristofer.jonsson@arm.com> | 2020-09-01 09:47:51 +0200 |
commit | 641c09157ebe25590e0ba378f70f05fc7466b8b1 (patch) | |
tree | 86d74b65c935e4485d32a6c94381a46b2d822f6a /applications/inference_process/include | |
parent | 7e0775474aa06ecd264c5cec299f73e66c37af9e (diff) | |
download | ethos-u-core-software-641c09157ebe25590e0ba378f70f05fc7466b8b1.tar.gz |
Moving message- and inference processes to core software 20.08
Change-Id: I76e94440402d58848116d06ff3fd1ed2000ac505
Diffstat (limited to 'applications/inference_process/include')
-rw-r--r-- | applications/inference_process/include/inference_process.hpp | 65 |
1 file changed, 65 insertions, 0 deletions
/*
 * Copyright (c) 2019-2020 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <queue>
#include <stdint.h> // fix: uint32_t is used below but was not declared by any included header
#include <stdlib.h>
#include <string>

namespace InferenceProcess {

/// Non-owning reference to a contiguous block of memory: a raw pointer plus
/// its size in bytes. DataPtr does not allocate or free the memory it points
/// to; lifetime is managed by the caller.
struct DataPtr {
    void *data;  ///< Start of the referenced buffer (not owned).
    size_t size; ///< Size of the buffer in bytes.

    /// Constructs a DataPtr; defaults to an empty (null, 0) reference.
    /// NOTE(review): single-argument form allows implicit conversion from
    /// void* — kept non-explicit to preserve the existing call sites.
    DataPtr(void *data = nullptr, size_t size = 0);
};

/// Describes one inference work item: the network to run, its input buffer,
/// where to write the output, and an optional reference output to compare
/// against. All DataPtr members are non-owning views.
struct InferenceJob {
    std::string name;       ///< Human-readable job identifier (used in logs).
    DataPtr networkModel;   ///< Serialized network model to execute.
    DataPtr input;          ///< Input tensor data for the inference.
    DataPtr output;         ///< Destination buffer for the inference result.
    DataPtr expectedOutput; ///< Optional reference output for verification.
    size_t numBytesToPrint; ///< How many output bytes to dump for debugging.

    /// Constructs an empty job (all buffers null, counters zero).
    InferenceJob();

    /// Constructs a fully-specified job.
    /// @param name            Job identifier.
    /// @param networkModel    Serialized network model.
    /// @param input           Input data buffer.
    /// @param output          Output data buffer.
    /// @param expectedOutput  Reference output to verify against (may be empty).
    /// @param numBytesToPrint Number of output bytes to print when debugging.
    InferenceJob(const std::string &name,
                 const DataPtr &networkModel,
                 const DataPtr &input,
                 const DataPtr &output,
                 const DataPtr &expectedOutput,
                 size_t numBytesToPrint);
};

/// Queue-driven inference runner: jobs are pushed onto an internal FIFO and
/// executed by run(), or executed directly via runJob().
class InferenceProcess {
public:
    InferenceProcess();

    /// Enqueues a job for later execution by run().
    /// @return true on success.
    bool push(const InferenceJob &job);

    /// Executes a single job immediately, bypassing the queue.
    /// @return true on success.
    bool runJob(InferenceJob &job);

    /// Drains the job queue, running each job in FIFO order.
    /// @param exitOnEmpty When true, returns once the queue is empty;
    ///                    otherwise keeps polling for new jobs.
    /// @return true on success.
    bool run(bool exitOnEmpty = true);

private:
    // NOTE(review): volatile does not provide atomicity or memory ordering
    // and is not a valid synchronization primitive in C++; if getLock()/
    // freeLock() implement a spinlock over this flag, consider
    // std::atomic<uint32_t> (or the platform's primitive) instead — kept
    // as-is here because the out-of-line definitions are not visible.
    volatile uint32_t lock;
    std::queue<InferenceJob> inferenceJobQueue; ///< FIFO of pending jobs.

    void getLock();  ///< Acquires the internal lock (defined out-of-line).
    void freeLock(); ///< Releases the internal lock (defined out-of-line).
};

} // namespace InferenceProcess