1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
|
/*
* Copyright (c) 2021 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/****************************************************************************
* Includes
****************************************************************************/
// NPU driver
#include "ethosu_driver.h"
// Inference process
#include "inference_process.hpp"
// System includes
#include <stdio.h>
#include <vector>
// Model data
#include "input.h"
#include "model.h"
#include "output.h"
#ifdef ETHOSU
#include <ethosu_monitor.hpp>
#include <pmu_ethosu.h>
#endif
using namespace std;
/****************************************************************************
* InferenceJob
****************************************************************************/
#ifndef TENSOR_ARENA_SIZE
// Default tensor-arena size in bytes; may be overridden from the build system.
#define TENSOR_ARENA_SIZE 2000000
#endif
// Scratch memory handed to the inference runtime for tensors/activations.
// Placed in a dedicated .bss section so the linker script can locate it.
__attribute__((section(".bss.tensor_arena"), aligned(16))) uint8_t TFLuTensorArena[TENSOR_ARENA_SIZE];
// Global inference engine operating on the arena above.
InferenceProcess::InferenceProcess inferenceProcess(TFLuTensorArena, TENSOR_ARENA_SIZE);
// Destination buffer for the inference result, sized to match the reference
// output (expectedOutputData) so they can be compared byte-for-byte.
uint8_t outputData[sizeof(expectedOutputData)] __attribute__((aligned(16), section("output_data_sec")));
// Events programmed into the four Ethos-U PMU counters (each overridable at
// build time): counter 0 defaults to the cycle counter, counter 1 to
// NPU-active, counters 2/3 default to no event.
#ifndef ETHOSU_PMU_EVENT_0
#define ETHOSU_PMU_EVENT_0 ETHOSU_PMU_CYCLE
#endif
#ifndef ETHOSU_PMU_EVENT_1
#define ETHOSU_PMU_EVENT_1 ETHOSU_PMU_NPU_ACTIVE
#endif
#ifndef ETHOSU_PMU_EVENT_2
#define ETHOSU_PMU_EVENT_2 ETHOSU_PMU_NO_EVENT
#endif
#ifndef ETHOSU_PMU_EVENT_3
#define ETHOSU_PMU_EVENT_3 ETHOSU_PMU_NO_EVENT
#endif
#ifdef ETHOSU
// Component number used when forming the Event Recorder message IDs below.
constexpr int32_t EventComponentNo = 0x00;
namespace {
// PMU events to program into the Ethos-U PMU (one entry per counter).
std::vector<ethosu_pmu_event_type> pmuEventConfig{ethosu_pmu_event_type(ETHOSU_PMU_EVENT_0),
                                                  ethosu_pmu_event_type(ETHOSU_PMU_EVENT_1),
                                                  ethosu_pmu_event_type(ETHOSU_PMU_EVENT_2),
                                                  ethosu_pmu_event_type(ETHOSU_PMU_EVENT_3)};
// One Event Recorder message ID per PMU counter, encoding level/component/event.
std::vector<int32_t> eventRecMessageIds{EventID(EventLevelDetail, EventComponentNo, ETHOSU_PMU_EVENT_0),
                                        EventID(EventLevelDetail, EventComponentNo, ETHOSU_PMU_EVENT_1),
                                        EventID(EventLevelDetail, EventComponentNo, ETHOSU_PMU_EVENT_2),
                                        EventID(EventLevelDetail, EventComponentNo, ETHOSU_PMU_EVENT_3)};
// SysTick reload value in core-clock ticks, i.e. ~60 samples per second.
// NOTE(review): the "Ms" suffix is misleading -- this is a tick count, not
// milliseconds.
const uint32_t delayMs = SystemCoreClock / 60ul;
// Driver handle sampled by the SysTick ISR; set only while an inference runs.
struct ethosu_driver *ethosuDrv;
// PMU monitor; Backend::PRINTF makes it emit samples via printf.
EthosUMonitor ethosuMonitor(eventRecMessageIds, EthosUMonitor::Backend::PRINTF);
} // namespace
extern "C" {
// SysTick interrupt handler: takes a periodic PMU sample from the driver
// currently running an inference (ethosuDrv, set in ethosu_inference_begin).
void SysTick_Handler(void) {
    ethosuMonitor.monitorSample(ethosuDrv);
}
// Ethos-U driver hook invoked when an inference begins: remember the driver
// for the SysTick ISR, program the configured PMU events, then start the
// periodic sampling tick. Order matters: the monitor must be configured
// before the first SysTick fires.
void ethosu_inference_begin(struct ethosu_driver *drv, const void *) {
    ethosuDrv = drv;
    ethosuMonitor.configure(drv, pmuEventConfig);
    // Enable polling; delayMs is a tick count (SystemCoreClock / 60).
    SysTick_Config(delayMs);
}
// Ethos-U driver hook invoked when an inference ends: stop the periodic
// SysTick sampling, clear the driver handle used by the ISR, take one final
// PMU sample so the tail of the inference is captured, then release the
// monitor for this driver.
void ethosu_inference_end(struct ethosu_driver *drv, const void *) {
    // Disable polling first so no further periodic samples fire.
    SysTick->CTRL = 0;
    ethosuDrv = nullptr; // fix: use nullptr, not the integer literal 0, for pointers
    ethosuMonitor.monitorSample(drv);
    ethosuMonitor.release(drv);
}
}
#endif
int runInference() {
// Load inference data
vector<InferenceProcess::DataPtr> input;
input.push_back(InferenceProcess::DataPtr(inputData, sizeof(inputData)));
vector<InferenceProcess::DataPtr> output;
output.push_back(InferenceProcess::DataPtr(outputData, sizeof(outputData)));
vector<InferenceProcess::DataPtr> expected;
expected.push_back(InferenceProcess::DataPtr(expectedOutputData, sizeof(expectedOutputData)));
// Create job
InferenceProcess::InferenceJob job(string(modelName),
InferenceProcess::DataPtr(networkModelData, sizeof(networkModelData)),
input,
output,
expected,
512,
std::vector<uint8_t>(4),
false);
// Run job
bool failed = inferenceProcess.runJob(job);
printf("Status of executed job: ");
printf(failed ? "Failed\n" : "Success\n");
return failed;
}
// Entry point: for Ethos-U builds, initialize Event Recorder tracing first,
// then run the bundled inference job and propagate its status as the exit code.
int main() {
#ifdef ETHOSU
    EventRecorderInitialize(EventRecordAll, 1);
#endif
    return runInference();
}
|