path: root/applications/baremetal/main.cpp

/*
 * Copyright (c) 2021 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/****************************************************************************
 * Includes
 ****************************************************************************/

// NPU driver
#include "ethosu_driver.h"
// Inference process
#include "inference_process.hpp"
// System includes
#include <stdio.h>
#include <vector>

// Model data
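// input.h, model.h and output.h are generated from the network at build time
// and define the inputData, networkModelData, modelName and
// expectedOutputData symbols used below.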
#include "input.h"
#include "model.h"
#include "output.h"

using namespace std;

/****************************************************************************
 * InferenceJob
 ****************************************************************************/

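// Size in bytes of the TensorFlow Lite Micro tensor arena. Override at
// compile time (e.g. -DTENSOR_ARENA_SIZE=<bytes>) to match the memory
// requirements of the model.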
#ifndef TENSOR_ARENA_SIZE
#define TENSOR_ARENA_SIZE 2000000
#endif

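// The arena is placed in a dedicated .bss section so the linker script can
// map it to RAM that the Ethos-U NPU can access; the 16 byte alignment
// matches TensorFlow Lite Micro's buffer alignment requirement.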
__attribute__((section(".bss.tensor_arena"), aligned(16))) uint8_t TFLuTensorArena[TENSOR_ARENA_SIZE];

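// Global inference runner, wrapping the TensorFlow Lite Micro interpreter
// and allocating tensors from the arena above.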
InferenceProcess::InferenceProcess inferenceProcess(TFLuTensorArena, TENSOR_ARENA_SIZE);

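// Destination buffer for the inference result, sized to hold this model's
// output tensor and placed in a dedicated section for the linker script.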
uint8_t outputData[176] __attribute__((aligned(16), section("output_data_sec")));

int runInference() {
    // Load inference data
    vector<InferenceProcess::DataPtr> input;
    input.push_back(InferenceProcess::DataPtr(inputData, sizeof(inputData)));

    vector<InferenceProcess::DataPtr> output;
    output.push_back(InferenceProcess::DataPtr(outputData, sizeof(outputData)));

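    // Reference output that runJob() compares the actual output against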
    vector<InferenceProcess::DataPtr> expected;
    expected.push_back(InferenceProcess::DataPtr(expectedOutputData, sizeof(expectedOutputData)));

    // Create job
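    // The trailing arguments are, per the InferenceProcess API: number of
    // output bytes to print on completion (512), PMU event configuration
    // (four zeroed event counters) and the PMU cycle counter enable flag
    // (disabled).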
    InferenceProcess::InferenceJob job(string(modelName),
                                       InferenceProcess::DataPtr(networkModelData, sizeof(networkModelData)),
                                       input,
                                       output,
                                       expected,
                                       512,
                                       std::vector<uint8_t>(4),
                                       false);

    // Run job
    bool failed = inferenceProcess.runJob(job);
    printf("Status of executed job: ");
    printf(failed ? "Failed\n" : "Success\n");

    return failed;
}

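// Propagates the inference status as the process exit code: zero on
// success, non-zero on failure.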
int main() {
    return runInference();
}