Diffstat (limited to 'applications/baremetal/main.cpp')
-rw-r--r--  applications/baremetal/main.cpp  33
1 file changed, 11 insertions(+), 22 deletions(-)
diff --git a/applications/baremetal/main.cpp b/applications/baremetal/main.cpp
index ea5f03c..e9b398a 100644
--- a/applications/baremetal/main.cpp
+++ b/applications/baremetal/main.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2021 Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2022 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
@@ -39,6 +39,7 @@
#endif
using namespace std;
+using namespace InferenceProcess;
/****************************************************************************
* InferenceJob
@@ -50,7 +51,7 @@ using namespace std;
__attribute__((section(".bss.tensor_arena"), aligned(16))) uint8_t TFLuTensorArena[TENSOR_ARENA_SIZE];
-InferenceProcess::InferenceProcess inferenceProcess(TFLuTensorArena, TENSOR_ARENA_SIZE);
+class InferenceProcess inferenceProcess(TFLuTensorArena, TENSOR_ARENA_SIZE);
uint8_t outputData[sizeof(expectedOutputData)] __attribute__((aligned(16), section("output_data_sec")));
@@ -97,7 +98,7 @@ void SysTick_Handler(void) {
ethosuMonitor.monitorSample(ethosuDrv);
}
-void ethosu_inference_begin(struct ethosu_driver *drv, const void *) {
+void ethosu_inference_begin(struct ethosu_driver *drv, void *) {
ethosuDrv = drv;
ethosuMonitor.configure(drv, pmuEventConfig);
@@ -105,7 +106,7 @@ void ethosu_inference_begin(struct ethosu_driver *drv, const void *) {
SysTick_Config(delayMs);
}
-void ethosu_inference_end(struct ethosu_driver *drv, const void *) {
+void ethosu_inference_end(struct ethosu_driver *drv, void *) {
// Disable polling
SysTick->CTRL = 0;
@@ -117,25 +118,13 @@ void ethosu_inference_end(struct ethosu_driver *drv, const void *) {
#endif
int runInference() {
- // Load inference data
- vector<InferenceProcess::DataPtr> input;
- input.push_back(InferenceProcess::DataPtr(inputData, sizeof(inputData)));
-
- vector<InferenceProcess::DataPtr> output;
- output.push_back(InferenceProcess::DataPtr(outputData, sizeof(outputData)));
-
- vector<InferenceProcess::DataPtr> expected;
- expected.push_back(InferenceProcess::DataPtr(expectedOutputData, sizeof(expectedOutputData)));
-
// Create job
- InferenceProcess::InferenceJob job(string(modelName),
- InferenceProcess::DataPtr(networkModelData, sizeof(networkModelData)),
- input,
- output,
- expected,
- 512,
- std::vector<uint8_t>(4),
- false);
+ InferenceJob job(string(modelName),
+ DataPtr(networkModelData, sizeof(networkModelData)),
+ {DataPtr(inputData, sizeof(inputData))},
+ {DataPtr(outputData, sizeof(outputData))},
+ {DataPtr(expectedOutputData, sizeof(expectedOutputData))},
+ 512);
// Run job
bool failed = inferenceProcess.runJob(job);
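
The shortened constructor call in this patch works because each braced list is converted in place to a std::vector<DataPtr> argument, and the trailing parameters that were previously passed explicitly (512 cycles aside) fall back to defaults. Below is a minimal, self-contained sketch of that pattern; the DataPtr and Job shapes are illustrative stand-ins, not the actual InferenceProcess API.

// Illustrative sketch only: stand-in types approximating the call pattern above.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

struct DataPtr {
    void *data;
    size_t size;
    DataPtr(void *d = nullptr, size_t s = 0) : data(d), size(s) {}
};

struct Job {
    std::string name;
    DataPtr network;
    std::vector<DataPtr> input, output, expected;
    uint64_t cycles;
    // Defaulted trailing parameters let the call site drop them, as the patch does.
    Job(const std::string &n, const DataPtr &net,
        const std::vector<DataPtr> &in, const std::vector<DataPtr> &out,
        const std::vector<DataPtr> &exp, uint64_t c = 0)
        : name(n), network(net), input(in), output(out), expected(exp), cycles(c) {}
};

int main() {
    static uint8_t model[32], in[16], out[16], ref[16];

    // Old style: build each std::vector<DataPtr> explicitly before the job.
    std::vector<DataPtr> input;
    input.push_back(DataPtr(in, sizeof(in)));
    Job verbose("model", DataPtr(model, sizeof(model)), input,
                {DataPtr(out, sizeof(out))}, {DataPtr(ref, sizeof(ref))}, 512);

    // New style: braced lists are converted to std::vector<DataPtr> in place.
    Job compact("model", DataPtr(model, sizeof(model)),
                {DataPtr(in, sizeof(in))},
                {DataPtr(out, sizeof(out))},
                {DataPtr(ref, sizeof(ref))}, 512);

    // Both jobs carry the same input count; only the call-site verbosity differs.
    printf("inputs: %zu and %zu\n", verbose.input.size(), compact.input.size());
    return 0;
}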