author | alexander <alexander.efremov@arm.com> | 2021-03-26 21:42:19 +0000
committer | Kshitij Sisodia <kshitij.sisodia@arm.com> | 2021-03-29 16:29:55 +0100
commit | 3c79893217bc632c9b0efa815091bef3c779490c
tree | ad06b444557eb8124652b45621d736fa1b92f65d /source/use_case/inference_runner/src
parent | 6ad6d55715928de72979b04194da1bdf04a4c51b
download | ml-embedded-evaluation-kit-3c79893217bc632c9b0efa815091bef3c779490c.tar.gz
Opensource ML embedded evaluation kit (tag: 21.03)
Change-Id: I12e807f19f5cacad7cef82572b6dd48252fd61fd
Diffstat (limited to 'source/use_case/inference_runner/src')
-rw-r--r-- | source/use_case/inference_runner/src/MainLoop.cc | 51
-rw-r--r-- | source/use_case/inference_runner/src/TestModel.cc | 36
-rw-r--r-- | source/use_case/inference_runner/src/UseCaseHandler.cc | 88
3 files changed, 175 insertions, 0 deletions
diff --git a/source/use_case/inference_runner/src/MainLoop.cc b/source/use_case/inference_runner/src/MainLoop.cc
new file mode 100644
index 0000000..b110a24
--- /dev/null
+++ b/source/use_case/inference_runner/src/MainLoop.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2021 Arm Limited. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "hal.h"                    /* Brings in platform definitions. */
+#include "TestModel.hpp"            /* Model class for running inference. */
+#include "UseCaseHandler.hpp"       /* Handlers for different user options. */
+#include "UseCaseCommonUtils.hpp"   /* Utils functions. */
+
+enum opcodes
+{
+    MENU_OPT_RUN_INF_NEXT = 1,      /* Run on next vector. */
+    MENU_OPT_SHOW_MODEL_INFO,       /* Show model info. */
+};
+
+void main_loop(hal_platform& platform)
+{
+    arm::app::TestModel model;  /* Model wrapper object. */
+
+    /* Load the model. */
+    if (!model.Init()) {
+        printf_err("Failed to initialise model\n");
+        return;
+    }
+
+    /* Instantiate application context. */
+    arm::app::ApplicationContext caseContext;
+
+    caseContext.Set<hal_platform&>("platform", platform);
+    caseContext.Set<arm::app::Model&>("model", model);
+    caseContext.Set<uint32_t>("imgIndex", 0);
+
+    /* Loop. */
+    if (RunInferenceHandler(caseContext)) {
+        info("Inference completed.\n");
+    } else {
+        printf_err("Inference failed.\n");
+    }
+}
diff --git a/source/use_case/inference_runner/src/TestModel.cc b/source/use_case/inference_runner/src/TestModel.cc
new file mode 100644
index 0000000..0926a96
--- /dev/null
+++ b/source/use_case/inference_runner/src/TestModel.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2021 Arm Limited. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "TestModel.hpp"
+
+#include "hal.h"
+
+const tflite::AllOpsResolver& arm::app::TestModel::GetOpResolver()
+{
+    return this->_m_opResolver;
+}
+
+extern uint8_t* GetModelPointer();
+const uint8_t* arm::app::TestModel::ModelPointer()
+{
+    return GetModelPointer();
+}
+
+extern size_t GetModelLen();
+size_t arm::app::TestModel::ModelSize()
+{
+    return GetModelLen();
+}
\ No newline at end of file
diff --git a/source/use_case/inference_runner/src/UseCaseHandler.cc b/source/use_case/inference_runner/src/UseCaseHandler.cc
new file mode 100644
index 0000000..ac4ea47
--- /dev/null
+++ b/source/use_case/inference_runner/src/UseCaseHandler.cc
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2021 Arm Limited. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "UseCaseHandler.hpp"
+
+#include "TestModel.hpp"
+#include "UseCaseCommonUtils.hpp"
+#include "hal.h"
+
+#include <cstdlib>
+
+namespace arm {
+namespace app {
+
+    bool RunInferenceHandler(ApplicationContext& ctx)
+    {
+        auto& platform = ctx.Get<hal_platform&>("platform");
+        auto& model = ctx.Get<Model&>("model");
+
+        constexpr uint32_t dataPsnTxtInfStartX = 150;
+        constexpr uint32_t dataPsnTxtInfStartY = 40;
+
+        if (!model.IsInited()) {
+            printf_err("Model is not initialised! Terminating processing.\n");
+            return false;
+        }
+
+        const size_t numInputs = model.GetNumInputs();
+
+        /* Populate each input tensor with random data. */
+        for (size_t inputIndex = 0; inputIndex < numInputs; inputIndex++) {
+
+            TfLiteTensor* inputTensor = model.GetInputTensor(inputIndex);
+
+            debug("Populating input tensor %zu@%p\n", inputIndex, inputTensor);
+            debug("Total input size to be populated: %zu\n", inputTensor->bytes);
+
+            /* Create a random input. */
+            if (inputTensor->bytes > 0) {
+
+                uint8_t* tData = tflite::GetTensorData<uint8_t>(inputTensor);
+
+                for (size_t j = 0; j < inputTensor->bytes; ++j) {
+                    tData[j] = static_cast<uint8_t>(std::rand() & 0xFF);
+                }
+            }
+        }
+
+        /* Strings for presentation/logging. */
+        std::string str_inf{"Running inference... "};
+
+        /* Display message on the LCD - inference running. */
+        platform.data_psn->present_data_text(
+            str_inf.c_str(), str_inf.size(),
+            dataPsnTxtInfStartX, dataPsnTxtInfStartY, 0);
+
+        RunInference(platform, model);
+
+        /* Erase. */
+        str_inf = std::string(str_inf.size(), ' ');
+        platform.data_psn->present_data_text(
+            str_inf.c_str(), str_inf.size(),
+            dataPsnTxtInfStartX, dataPsnTxtInfStartY, 0);
+
+#if VERIFY_TEST_OUTPUT
+        for (size_t outputIndex = 0; outputIndex < model.GetNumOutputs(); outputIndex++) {
+            arm::app::DumpTensor(model.GetOutputTensor(outputIndex));
+        }
+#endif /* VERIFY_TEST_OUTPUT */
+
+        return true;
+    }
+
+} /* namespace app */
+} /* namespace arm */
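
Note: TestModel.cc above only declares `GetModelPointer()` and `GetModelLen()` as `extern` and routes the wrapper's `ModelPointer()`/`ModelSize()` through them, so the model data itself must come from another translation unit in the build, typically one generated from the `.tflite` flatbuffer. A minimal, hypothetical stand-in for such a file is sketched below purely to show the expected linkage; the array name, its placeholder contents, and the absence of any memory-section attributes are assumptions, not the kit's actual generated source.

```cpp
/* Hypothetical stand-in for the generated model source file.
 * A real build would embed the full .tflite flatbuffer bytes here,
 * usually placed in a dedicated memory region. */
#include <cstddef>
#include <cstdint>

namespace {
    /* Placeholder bytes only; the generated file carries the real model. */
    uint8_t nnModelData[] = {0x00, 0x00, 0x00, 0x00};
}

uint8_t* GetModelPointer()
{
    return nnModelData;
}

size_t GetModelLen()
{
    return sizeof(nnModelData);
}
```

Keeping the model behind these two free functions lets `TestModel` stay agnostic of how the flatbuffer is embedded, which is why a different model can be linked in without touching the sources added in this change.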