Diffstat (limited to 'tests/use_case/kws_asr/InferenceTestMicroNetKws.cc')
-rw-r--r--  tests/use_case/kws_asr/InferenceTestMicroNetKws.cc  140
1 file changed, 71 insertions, 69 deletions
diff --git a/tests/use_case/kws_asr/InferenceTestMicroNetKws.cc b/tests/use_case/kws_asr/InferenceTestMicroNetKws.cc
index 9fd8171..4cfd784 100644
--- a/tests/use_case/kws_asr/InferenceTestMicroNetKws.cc
+++ b/tests/use_case/kws_asr/InferenceTestMicroNetKws.cc
@@ -1,6 +1,6 @@
/*
- * SPDX-FileCopyrightText: Copyright 2021 Arm Limited and/or its affiliates <open-source-office@arm.com>
- * SPDX-License-Identifier: Apache-2.0
+ * SPDX-FileCopyrightText: Copyright 2021 Arm Limited and/or its affiliates
+ * <open-source-office@arm.com> SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,10 +14,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+#include "BufAttributes.hpp"
#include "MicroNetKwsModel.hpp"
-#include "TestData_kws.hpp"
#include "TensorFlowLiteMicro.hpp"
-#include "BufAttributes.hpp"
+#include "TestData_kws.hpp"
#include <catch.hpp>
#include <random>
@@ -35,87 +35,89 @@ namespace app {
namespace test {
namespace kws {
-bool RunInference(arm::app::Model& model, const int8_t vec[]) {
- TfLiteTensor* inputTensor = model.GetInputTensor(0);
- REQUIRE(inputTensor);
-
- const size_t copySz = inputTensor->bytes < IFM_0_DATA_SIZE ?
- inputTensor->bytes :
- IFM_0_DATA_SIZE;
- memcpy(inputTensor->data.data, vec, copySz);
-
- return model.RunInference();
-}
+ bool RunInference(arm::app::Model& model, const int8_t vec[])
+ {
+ TfLiteTensor* inputTensor = model.GetInputTensor(0);
+ REQUIRE(inputTensor);
-bool RunInferenceRandom(arm::app::Model& model) {
- TfLiteTensor* inputTensor = model.GetInputTensor(0);
- REQUIRE(inputTensor);
+ const size_t copySz =
+ inputTensor->bytes < IFM_0_DATA_SIZE ? inputTensor->bytes : IFM_0_DATA_SIZE;
+ memcpy(inputTensor->data.data, vec, copySz);
- std::random_device rndDevice;
- std::mt19937 mersenneGen{rndDevice()};
- std::uniform_int_distribution<short> dist{-128, 127};
-
- auto gen = [&dist, &mersenneGen]() {
- return dist(mersenneGen);
- };
-
- std::vector<int8_t> randomAudio(inputTensor->bytes);
- std::generate(std::begin(randomAudio), std::end(randomAudio), gen);
+ return model.RunInference();
+ }
- REQUIRE(RunInference(model, randomAudio.data()));
- return true;
-}
+ bool RunInferenceRandom(arm::app::Model& model)
+ {
+ TfLiteTensor* inputTensor = model.GetInputTensor(0);
+ REQUIRE(inputTensor);
-template<typename T>
-void TestInference(const T* input_goldenFV, const T* output_goldenFV, arm::app::Model& model) {
- REQUIRE(RunInference(model, input_goldenFV));
+ std::random_device rndDevice;
+ std::mt19937 mersenneGen{rndDevice()};
+ std::uniform_int_distribution<short> dist{-128, 127};
- TfLiteTensor* outputTensor = model.GetOutputTensor(0);
+ auto gen = [&dist, &mersenneGen]() { return dist(mersenneGen); };
- REQUIRE(outputTensor);
- REQUIRE(outputTensor->bytes == OFM_0_DATA_SIZE);
- auto tensorData = tflite::GetTensorData<T>(outputTensor);
- REQUIRE(tensorData);
+ std::vector<int8_t> randomAudio(inputTensor->bytes);
+ std::generate(std::begin(randomAudio), std::end(randomAudio), gen);
- for (size_t i = 0; i < outputTensor->bytes; i++) {
- REQUIRE(static_cast<int>(tensorData[i]) == static_cast<int>((T) output_goldenFV[i]));
+ REQUIRE(RunInference(model, randomAudio.data()));
+ return true;
}
-}
-TEST_CASE("Running random inference with Tflu and MicroNetKwsModel Int8", "[MicroNetKws]") {
- arm::app::MicroNetKwsModel model{};
+ template <typename T>
+ void TestInference(const T* input_goldenFV, const T* output_goldenFV, arm::app::Model& model)
+ {
+ REQUIRE(RunInference(model, input_goldenFV));
- REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init(arm::app::tensorArena,
- sizeof(arm::app::tensorArena),
- arm::app::kws::GetModelPointer(),
- arm::app::kws::GetModelLen()));
- REQUIRE(model.IsInited());
+ TfLiteTensor* outputTensor = model.GetOutputTensor(0);
- REQUIRE(RunInferenceRandom(model));
-}
+ REQUIRE(outputTensor);
+ REQUIRE(outputTensor->bytes == OFM_0_DATA_SIZE);
+ auto tensorData = tflite::GetTensorData<T>(outputTensor);
+ REQUIRE(tensorData);
-TEST_CASE("Running inference with Tflu and MicroNetKwsModel Int8", "[MicroNetKws]") {
- REQUIRE(NUMBER_OF_IFM_FILES == NUMBER_OF_OFM_FILES);
- for (uint32_t i = 0; i < NUMBER_OF_IFM_FILES; ++i) {
- const int8_t* input_goldenFV = get_ifm_data_array(i);
- const int8_t* output_goldenFV = get_ofm_data_array(i);
+ for (size_t i = 0; i < outputTensor->bytes; i++) {
+ REQUIRE(static_cast<int>(tensorData[i]) == static_cast<int>((T)output_goldenFV[i]));
+ }
+ }
- DYNAMIC_SECTION("Executing inference with re-init") {
- arm::app::MicroNetKwsModel model{};
+ TEST_CASE("Running random inference with Tflu and MicroNetKwsModel Int8", "[MicroNetKws]")
+ {
+ arm::app::MicroNetKwsModel model{};
- REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init(arm::app::tensorArena,
- sizeof(arm::app::tensorArena),
- arm::app::kws::GetModelPointer(),
- arm::app::kws::GetModelLen()));
- REQUIRE(model.IsInited());
+ REQUIRE_FALSE(model.IsInited());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::kws::GetModelPointer(),
+ arm::app::kws::GetModelLen()));
+ REQUIRE(model.IsInited());
- TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
+ REQUIRE(RunInferenceRandom(model));
+ }
+ TEST_CASE("Running inference with Tflu and MicroNetKwsModel Int8", "[MicroNetKws]")
+ {
+ REQUIRE(NUMBER_OF_IFM_FILES == NUMBER_OF_OFM_FILES);
+ for (uint32_t i = 0; i < NUMBER_OF_IFM_FILES; ++i) {
+ const int8_t* input_goldenFV = GetIfmDataArray(i);
+ const int8_t* output_goldenFV = GetOfmDataArray(i);
+
+ DYNAMIC_SECTION("Executing inference with re-init")
+ {
+ arm::app::MicroNetKwsModel model{};
+
+ REQUIRE_FALSE(model.IsInited());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::kws::GetModelPointer(),
+ arm::app::kws::GetModelLen()));
+ REQUIRE(model.IsInited());
+
+ TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
+ }
}
}
-}
-} //namespace
-} //namespace
+} // namespace kws
+} // namespace test
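
The reformatted test keeps the same logic as before: clamp the copy into the input tensor to the smaller of the tensor's byte size and the golden input size, fill random int8 data through a short distribution (std::uniform_int_distribution does not accept int8_t as a value type), and compare each output byte against the golden vector. The following is a minimal, self-contained sketch of that pattern using only the standard library; FakeTensor and the 490-byte buffer are illustrative stand-ins, not part of the TfLiteTensor / arm::app API exercised in the diff above.

// Minimal, self-contained sketch of the test pattern above.
// "FakeTensor" stands in for TfLiteTensor; the buffer size is illustrative.
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <cstring>
#include <random>
#include <vector>

struct FakeTensor {
    std::vector<int8_t> data;                       // backing buffer
    size_t bytes() const { return data.size(); }
};

// Copy no more than the tensor can hold (mirrors the clamped memcpy).
void FillInput(FakeTensor& input, const int8_t* vec, size_t vecBytes)
{
    const size_t copySz = std::min(input.bytes(), vecBytes);
    std::memcpy(input.data.data(), vec, copySz);
}

int main()
{
    FakeTensor input{std::vector<int8_t>(490)};     // illustrative IFM size

    // int8_t is not a valid distribution type, so draw shorts and narrow.
    std::random_device rndDevice;
    std::mt19937 mersenneGen{rndDevice()};
    std::uniform_int_distribution<short> dist{-128, 127};

    std::vector<int8_t> randomAudio(input.bytes());
    std::generate(randomAudio.begin(), randomAudio.end(),
                  [&]() { return static_cast<int8_t>(dist(mersenneGen)); });

    FillInput(input, randomAudio.data(), randomAudio.size());

    // Element-wise golden comparison (here trivially against the source data).
    for (size_t i = 0; i < input.bytes(); ++i) {
        assert(static_cast<int>(input.data[i]) == static_cast<int>(randomAudio[i]));
    }
    return 0;
}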