diff options
Diffstat (limited to 'source/application/main')
-rw-r--r-- | source/application/main/include/BaseProcessing.hpp | 73 |
-rw-r--r-- | source/application/main/include/UseCaseCommonUtils.hpp | 62 |
2 files changed, 135 insertions, 0 deletions
diff --git a/source/application/main/include/BaseProcessing.hpp b/source/application/main/include/BaseProcessing.hpp new file mode 100644 index 0000000..c1c3255 --- /dev/null +++ b/source/application/main/include/BaseProcessing.hpp @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2022 Arm Limited. All rights reserved. + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef BASE_PROCESSING_HPP +#define BASE_PROCESSING_HPP + +#include "Model.hpp" + +namespace arm { +namespace app { + + /** + * @brief Base class exposing pre-processing API. + * Use cases should provide their own PreProcessing class that inherits from this one. + * All steps required to take raw input data and populate tensors ready for inference + * should be handled. + */ + class BasePreProcess { + + public: + virtual ~BasePreProcess() = default; + + /** + * @brief Should perform pre-processing of 'raw' input data and load it into + * TFLite Micro input tensors ready for inference + * @param[in] input Pointer to the data that pre-processing will work on. + * @param[in] inputSize Size of the input data. + * @return true if successful, false otherwise. + **/ + virtual bool DoPreProcess(const void* input, size_t inputSize) = 0; + + protected: + Model* m_model = nullptr; + }; + + /** + * @brief Base class exposing post-processing API. + * Use cases should provide their own PostProcessing class that inherits from this one. 
+ * All steps required to take inference output and populate results vectors should be handled. + */ + class BasePostProcess { + + public: + virtual ~BasePostProcess() = default; + + /** + * @brief Should perform post-processing of the result of inference then populate + * result data for any later use. + * @return true if successful, false otherwise. + **/ + virtual bool DoPostProcess() = 0; + + protected: + Model* m_model = nullptr; + }; + +} /* namespace app */ +} /* namespace arm */ + +#endif /* BASE_PROCESSING_HPP */
\ No newline at end of file diff --git a/source/application/main/include/UseCaseCommonUtils.hpp b/source/application/main/include/UseCaseCommonUtils.hpp index 9b6d550..f79f6ed 100644 --- a/source/application/main/include/UseCaseCommonUtils.hpp +++ b/source/application/main/include/UseCaseCommonUtils.hpp @@ -24,6 +24,7 @@ #include "UseCaseHandler.hpp" /* Handlers for different user options. */ #include "Classifier.hpp" /* Classifier. */ #include "InputFiles.hpp" +#include "BaseProcessing.hpp" void DisplayCommonMenu(); @@ -107,6 +108,67 @@ namespace app { **/ bool ListFilesHandler(ApplicationContext& ctx); + /** + * @brief Use case runner class that will handle calling pre-processing, + * inference and post-processing. + * After constructing an instance of this class the user can call + * PreProcess(), RunInference() and PostProcess() to perform inference. + */ + class UseCaseRunner { + + private: + BasePreProcess* m_preProcess; + BasePostProcess* m_postProcess; + Model* m_model; + + public: + explicit UseCaseRunner(BasePreProcess* preprocess, BasePostProcess* postprocess, Model* model) + : m_preProcess{preprocess}, + m_postProcess{postprocess}, + m_model{model} + {}; + + /** + * @brief Runs pre-processing as defined by PreProcess object within the runner. + * Templated for the input data type. + * @param[in] inputData Pointer to the data that inference will be performed on. + * @param[in] inputSize Size of the input data that inference will be performed on. + * @return true if successful, false otherwise. + **/ + template<typename T> + bool PreProcess(T* inputData, size_t inputSize) { + if (!this->m_preProcess->DoPreProcess(inputData, inputSize)) { + printf_err("Pre-processing failed."); + return false; + } + return true; + } + + /** + * @brief Runs inference with the Model object within the runner. + * @return true if successful, false otherwise. 
+ **/ + bool RunInference() { + if (!this->m_model->RunInference()) { + printf_err("Inference failed."); + return false; + } + return true; + } + + /** + * @brief Runs post-processing as defined by PostProcess object within the runner. + * @return true if successful, false otherwise. + **/ + bool PostProcess() { + if (!this->m_postProcess->DoPostProcess()) { + printf_err("Post-processing failed."); + return false; + } + return true; + } + }; + } /* namespace app */ } /* namespace arm */