Diffstat (limited to 'tests/use_case/kws_asr/InferenceTestWav2Letter.cc')
-rw-r--r-- tests/use_case/kws_asr/InferenceTestWav2Letter.cc | 147
1 file changed, 72 insertions(+), 75 deletions(-)
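The diff below reformats the Wav2Letter ASR inference tests. The random-input test fills the model's int8 input tensor with uniformly distributed values before invoking the interpreter; because std::uniform_int_distribution is not defined for 8-bit types, the test draws shorts in [-128, 127] and narrows them. A minimal standalone sketch of that input-generation pattern, assuming a placeholder buffer size in place of the real inputTensor->bytes:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <random>
#include <vector>

int main()
{
    // Same generator setup as RunInferenceRandom in the diff below.
    std::random_device rndDevice;
    std::mt19937 mersenneGen{rndDevice()};
    std::uniform_int_distribution<short> dist{-128, 127};

    // Placeholder size; the real test uses inputTensor->bytes.
    const std::size_t inputBytes = 1024;
    std::vector<int8_t> randomAudio(inputBytes);
    std::generate(randomAudio.begin(), randomAudio.end(), [&]() {
        return static_cast<int8_t>(dist(mersenneGen)); // narrow short -> int8_t
    });

    std::cout << "Filled " << randomAudio.size() << " bytes of random int8 input\n";
    return 0;
}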
diff --git a/tests/use_case/kws_asr/InferenceTestWav2Letter.cc b/tests/use_case/kws_asr/InferenceTestWav2Letter.cc
index 72dcadc..b49b886 100644
--- a/tests/use_case/kws_asr/InferenceTestWav2Letter.cc
+++ b/tests/use_case/kws_asr/InferenceTestWav2Letter.cc
@@ -1,6 +1,6 @@
/*
- * SPDX-FileCopyrightText: Copyright 2021 Arm Limited and/or its affiliates <open-source-office@arm.com>
- * SPDX-License-Identifier: Apache-2.0
+ * SPDX-FileCopyrightText: Copyright 2021 Arm Limited and/or its affiliates
+ * <open-source-office@arm.com> SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,10 +14,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+#include "BufAttributes.hpp"
#include "TensorFlowLiteMicro.hpp"
-#include "Wav2LetterModel.hpp"
#include "TestData_asr.hpp"
-#include "BufAttributes.hpp"
+#include "Wav2LetterModel.hpp"
#include <catch.hpp>
#include <random>
@@ -35,94 +35,91 @@ namespace app {
namespace test {
namespace asr {
-bool RunInference(arm::app::Model& model, const int8_t vec[], const size_t copySz)
-{
- TfLiteTensor* inputTensor = model.GetInputTensor(0);
- REQUIRE(inputTensor);
+ bool RunInference(arm::app::Model& model, const int8_t vec[], const size_t copySz)
+ {
+ TfLiteTensor* inputTensor = model.GetInputTensor(0);
+ REQUIRE(inputTensor);
- memcpy(inputTensor->data.data, vec, copySz);
+ memcpy(inputTensor->data.data, vec, copySz);
- return model.RunInference();
-}
-
-bool RunInferenceRandom(arm::app::Model& model)
-{
- TfLiteTensor* inputTensor = model.GetInputTensor(0);
- REQUIRE(inputTensor);
+ return model.RunInference();
+ }
- std::random_device rndDevice;
- std::mt19937 mersenneGen{rndDevice()};
- std::uniform_int_distribution<short> dist {-128, 127};
+ bool RunInferenceRandom(arm::app::Model& model)
+ {
+ TfLiteTensor* inputTensor = model.GetInputTensor(0);
+ REQUIRE(inputTensor);
- auto gen = [&dist, &mersenneGen](){
- return dist(mersenneGen);
- };
+ std::random_device rndDevice;
+ std::mt19937 mersenneGen{rndDevice()};
+ std::uniform_int_distribution<short> dist{-128, 127};
- std::vector<int8_t> randomAudio(inputTensor->bytes);
- std::generate(std::begin(randomAudio), std::end(randomAudio), gen);
+ auto gen = [&dist, &mersenneGen]() { return dist(mersenneGen); };
- REQUIRE(RunInference(model, randomAudio.data(), inputTensor->bytes));
- return true;
-}
+ std::vector<int8_t> randomAudio(inputTensor->bytes);
+ std::generate(std::begin(randomAudio), std::end(randomAudio), gen);
-TEST_CASE("Running random inference with Tflu and Wav2LetterModel Int8", "[Wav2Letter]")
-{
- arm::app::Wav2LetterModel model{};
+ REQUIRE(RunInference(model, randomAudio.data(), inputTensor->bytes));
+ return true;
+ }
- REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init(arm::app::tensorArena,
- sizeof(arm::app::tensorArena),
- arm::app::asr::GetModelPointer(),
- arm::app::asr::GetModelLen()));
- REQUIRE(model.IsInited());
+ TEST_CASE("Running random inference with Tflu and Wav2LetterModel Int8", "[Wav2Letter]")
+ {
+ arm::app::Wav2LetterModel model{};
- REQUIRE(RunInferenceRandom(model));
-}
+ REQUIRE_FALSE(model.IsInited());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen()));
+ REQUIRE(model.IsInited());
+ REQUIRE(RunInferenceRandom(model));
+ }
-template<typename T>
-void TestInference(const T* input_goldenFV, const T* output_goldenFV, arm::app::Model& model)
-{
- TfLiteTensor* inputTensor = model.GetInputTensor(0);
- REQUIRE(inputTensor);
+ template <typename T>
+ void TestInference(const T* input_goldenFV, const T* output_goldenFV, arm::app::Model& model)
+ {
+ TfLiteTensor* inputTensor = model.GetInputTensor(0);
+ REQUIRE(inputTensor);
- REQUIRE(RunInference(model, input_goldenFV, inputTensor->bytes));
+ REQUIRE(RunInference(model, input_goldenFV, inputTensor->bytes));
- TfLiteTensor* outputTensor = model.GetOutputTensor(0);
+ TfLiteTensor* outputTensor = model.GetOutputTensor(0);
- REQUIRE(outputTensor);
- REQUIRE(outputTensor->bytes == OFM_0_DATA_SIZE);
- auto tensorData = tflite::GetTensorData<T>(outputTensor);
- REQUIRE(tensorData);
+ REQUIRE(outputTensor);
+ REQUIRE(outputTensor->bytes == OFM_0_DATA_SIZE);
+ auto tensorData = tflite::GetTensorData<T>(outputTensor);
+ REQUIRE(tensorData);
- for (size_t i = 0; i < outputTensor->bytes; i++) {
- REQUIRE(static_cast<int>(tensorData[i]) == static_cast<int>(((T)output_goldenFV[i])));
+ for (size_t i = 0; i < outputTensor->bytes; i++) {
+ REQUIRE(static_cast<int>(tensorData[i]) == static_cast<int>(((T)output_goldenFV[i])));
+ }
}
-}
-
-TEST_CASE("Running inference with Tflu and Wav2LetterModel Int8", "[Wav2Letter]")
-{
- REQUIRE(NUMBER_OF_IFM_FILES == NUMBER_OF_OFM_FILES);
- for (uint32_t i = 0 ; i < NUMBER_OF_IFM_FILES; ++i) {
- auto input_goldenFV = get_ifm_data_array(i);;
- auto output_goldenFV = get_ofm_data_array(i);
-
- DYNAMIC_SECTION("Executing inference with re-init")
- {
- arm::app::Wav2LetterModel model{};
-
- REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init(arm::app::tensorArena,
- sizeof(arm::app::tensorArena),
- arm::app::asr::GetModelPointer(),
- arm::app::asr::GetModelLen()));
- REQUIRE(model.IsInited());
-
- TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
+ TEST_CASE("Running inference with Tflu and Wav2LetterModel Int8", "[Wav2Letter]")
+ {
+ REQUIRE(NUMBER_OF_IFM_FILES == NUMBER_OF_OFM_FILES);
+ for (uint32_t i = 0; i < NUMBER_OF_IFM_FILES; ++i) {
+ auto input_goldenFV = GetIfmDataArray(i);
+ ;
+ auto output_goldenFV = GetOfmDataArray(i);
+
+ DYNAMIC_SECTION("Executing inference with re-init")
+ {
+ arm::app::Wav2LetterModel model{};
+
+ REQUIRE_FALSE(model.IsInited());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen()));
+ REQUIRE(model.IsInited());
+
+ TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
+ }
}
}
-}
-} //namespace
-} //namespace
+} // namespace asr
+} // namespace test
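For reference, the TestInference template in the diff above runs the model on a golden input vector and checks every output byte against a golden output vector. A self-contained sketch of that element-wise comparison, using a hypothetical StubModel that simply echoes its input in place of arm::app::Wav2LetterModel (so the expected output is chosen to match the stub, not the real network):

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Hypothetical stand-in for the model: copies input to output unchanged.
// The real test would run the TensorFlow Lite Micro interpreter here.
struct StubModel {
    std::vector<int8_t> input;
    std::vector<int8_t> output;
    bool RunInference()
    {
        output = input;
        return true;
    }
};

int main()
{
    const std::vector<int8_t> inputGolden{1, -2, 3, -4};
    const std::vector<int8_t> outputGolden{1, -2, 3, -4}; // matches the identity stub

    StubModel model;
    model.input = inputGolden;
    const bool ok = model.RunInference();
    assert(ok);

    // Element-wise check, mirroring the loop over outputTensor->bytes in TestInference.
    assert(model.output.size() == outputGolden.size());
    for (std::size_t i = 0; i < model.output.size(); ++i) {
        assert(static_cast<int>(model.output[i]) == static_cast<int>(outputGolden[i]));
    }
    std::cout << "Golden output check passed\n";
    return 0;
}

In the real test the golden vectors come from GetIfmDataArray and GetOfmDataArray, and the buffer sizes come from the input and output tensors rather than hard-coded values.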