/*
 * Copyright (c) 2021-2022  Arm Limited. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef USECASE_COMMON_UTILS_HPP
#define USECASE_COMMON_UTILS_HPP

#include "hal.h"
#include "Model.hpp"
#include "AppContext.hpp"
#include "Profiler.hpp"
#include "UseCaseHandler.hpp"       /* Handlers for different user options. */
#include "Classifier.hpp"           /* Classifier. */
#include "InputFiles.hpp"
#include "BaseProcessing.hpp"


/**
 * @brief   Displays the common menu of use case options on stdout.
 **/
void DisplayCommonMenu();

/**
 * @brief       Presents inference results using the data presentation
 *              object.
 * @param[in]   results   Vector of classification results to be displayed.
 * @return      true if successful, false otherwise.
 **/
bool PresentInferenceResult(const std::vector<arm::app::ClassificationResult>& results);


/**
 * @brief           Helper function to increment the current input feature
 *                  vector index.
 * @param[in,out]   ctx       Reference to the application context object.
 * @param[in]       useCase   Use case name.
 **/
void IncrementAppCtxIfmIdx(arm::app::ApplicationContext& ctx, const std::string& useCase);

/**
 * @brief           Helper function to set the input feature map index.
 * @param[in,out]   ctx          Reference to the application context object.
 * @param[in]       idx          Value to be set.
 * @param[in]       ctxIfmName   Input feature map name.
 * @return          true if the index is set, false otherwise.
 **/
bool SetAppCtxIfmIdx(arm::app::ApplicationContext& ctx, uint32_t idx, const std::string& ctxIfmName);
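
/* Illustrative usage sketch (comment only, not part of this header's API).
 * ApplicationContext stores values under string keys; the key name
 * "imgIndex" and use case name "img" below are hypothetical examples:
 *
 *     arm::app::ApplicationContext ctx;
 *     ctx.Set<uint32_t>("imgIndex", 0);
 *     SetAppCtxIfmIdx(ctx, 0, "imgIndex");   // select the first input
 *     IncrementAppCtxIfmIdx(ctx, "img");     // advance to the next input
 */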


namespace common {

    enum OPCODES {
        MENU_OPT_RUN_INF_NEXT = 1,   /* Run on next vector. */
        MENU_OPT_RUN_INF_CHOSEN,     /* Run on a user provided vector index. */
        MENU_OPT_RUN_INF_ALL,        /* Run inference on all. */
        MENU_OPT_SHOW_MODEL_INFO,    /* Show model info. */
        MENU_OPT_LIST_IFM            /* List the current IFM. */
    };

} /* namespace common */
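
/* Illustrative sketch (comment only) of how the menu opcodes are typically
 * consumed; the handler bodies are placeholders for the functions declared
 * in UseCaseHandler.hpp:
 *
 *     DisplayCommonMenu();
 *     switch (arm::app::ReadUserInputAsInt()) {
 *         case common::MENU_OPT_RUN_INF_NEXT:
 *             // run inference on the next input vector
 *             break;
 *         case common::MENU_OPT_SHOW_MODEL_INFO:
 *             // print model information
 *             break;
 *         // ... remaining options handled similarly
 *     }
 */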

namespace arm {
namespace app {
    /**
     * @brief           Run inference using the given model object. If
     *                  profiling is enabled, the profiling statistics
     *                  will also be logged.
     * @param[in]       model      Reference to the initialised model.
     * @param[in]       profiler   Reference to the initialised profiler.
     * @return          true if inference succeeds, false otherwise.
     **/
    bool RunInference(arm::app::Model& model, Profiler& profiler);

    /**
     * @brief           Read user input and return it as an integer.
     * @return          Integer value corresponding to the user input.
     **/
    int ReadUserInputAsInt();

#if VERIFY_TEST_OUTPUT
    /**
     * @brief       Helper function to dump a tensor to stdout.
     * @param[in]   tensor                    Tensor to be dumped.
     * @param[in]   lineBreakForNumElements   Number of elements after which
     *                                        a line break is added.
     **/
    void DumpTensor(const TfLiteTensor* tensor,
                    size_t lineBreakForNumElements = 16);

    /**
     * @brief       Helper function to dump raw tensor data to stdout.
     * @param[in]   tensorData                Pointer to the data to be dumped.
     * @param[in]   size                      Size of the data, in bytes.
     * @param[in]   lineBreakForNumElements   Number of elements after which
     *                                        a line break is added.
     **/
    void DumpTensorData(const uint8_t* tensorData,
                        size_t size,
                        size_t lineBreakForNumElements = 16);
#endif /* VERIFY_TEST_OUTPUT */

    /**
     * @brief       List the files baked into the application.
     * @param[in]   ctx   Reference to the application context.
     * @return      true if the event was handled, false otherwise.
     **/
    bool ListFilesHandler(ApplicationContext& ctx);

    /**
     * @brief   Use case runner class that will handle calling pre-processing,
     *          inference and post-processing.
     *          After constructing an instance of this class the user can call
     *          PreProcess(), RunInference() and PostProcess() to perform inference.
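     *
     *          Typical usage (illustrative sketch: MyPreProcess and
     *          MyPostProcess are hypothetical stand-ins for a use case's own
     *          BasePreProcess/BasePostProcess implementations):
     *
     *          @code
     *          MyPreProcess preProcess{&model};
     *          MyPostProcess postProcess{&model, results};
     *          UseCaseRunner runner{&preProcess, &postProcess, &model};
     *
     *          if (runner.PreProcess(inputData, inputSize) &&
     *              runner.RunInference() &&
     *              runner.PostProcess()) {
     *              // results are now populated by the post-process stage
     *          }
     *          @endcode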
     */
    class UseCaseRunner {

    private:
        BasePreProcess* m_preProcess;    /* Non-owning pointer to the pre-processing object. */
        BasePostProcess* m_postProcess;  /* Non-owning pointer to the post-processing object. */
        Model* m_model;                  /* Non-owning pointer to the model. */

    public:
        explicit UseCaseRunner(BasePreProcess* preprocess, BasePostProcess* postprocess, Model* model)
        : m_preProcess{preprocess},
          m_postProcess{postprocess},
          m_model{model}
        {}

        /**
         * @brief       Runs pre-processing as defined by PreProcess object within the runner.
         *              Templated for the input data type.
         * @param[in]   inputData    Pointer to the data that inference will be performed on.
         * @param[in]   inputSize    Size of the input data that inference will be performed on.
         * @return      true if successful, false otherwise.
         **/
        template<typename T>
        bool PreProcess(T* inputData, size_t inputSize) {
            if (!this->m_preProcess->DoPreProcess(inputData, inputSize)) {
                printf_err("Pre-processing failed.");
                return false;
            }
            return true;
        }

        /**
         * @brief       Runs inference with the Model object within the runner.
         * @return      true if successful, false otherwise.
         **/
        bool RunInference() {
            if (!this->m_model->RunInference()) {
                printf_err("Inference failed.");
                return false;
            }
            return true;
        }

        /**
         * @brief       Runs post-processing as defined by PostProcess object within the runner.
         * @return      true if successful, false otherwise.
         **/
        bool PostProcess() {
            if (!this->m_postProcess->DoPostProcess()) {
                printf_err("Post-processing failed.");
                return false;
            }
            return true;
        }
    };

} /* namespace app */
} /* namespace arm */


#endif /* USECASE_COMMON_UTILS_HPP */