Diffstat (limited to 'tests/use_case/kws_asr')
 tests/use_case/kws_asr/InferenceTestDSCNN.cc      | 9 +++++----
 tests/use_case/kws_asr/InferenceTestWav2Letter.cc | 5 +++--
 2 files changed, 8 insertions(+), 6 deletions(-)
diff --git a/tests/use_case/kws_asr/InferenceTestDSCNN.cc b/tests/use_case/kws_asr/InferenceTestDSCNN.cc
index 134003d..ad1731b 100644
--- a/tests/use_case/kws_asr/InferenceTestDSCNN.cc
+++ b/tests/use_case/kws_asr/InferenceTestDSCNN.cc
@@ -29,9 +29,9 @@ bool RunInference(arm::app::Model& model, const int8_t vec[]) {
TfLiteTensor* inputTensor = model.GetInputTensor(0);
REQUIRE(inputTensor);
- const size_t copySz = inputTensor->bytes < IFM_DATA_SIZE ?
+ const size_t copySz = inputTensor->bytes < IFM_0_DATA_SIZE ?
inputTensor->bytes :
- IFM_DATA_SIZE;
+ IFM_0_DATA_SIZE;
memcpy(inputTensor->data.data, vec, copySz);
return model.RunInference();
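
For context, the hunk above clamps the memcpy length to whichever is smaller: the input tensor's byte size or the golden IFM buffer size, so a model/data mismatch cannot overrun either buffer. A minimal sketch of that pattern, using a hypothetical IFM_0_DATA_SIZE value in place of the generated test-data constant:

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Hypothetical stand-in for the generated IFM_0_DATA_SIZE constant.
    constexpr std::size_t IFM_0_DATA_SIZE = 490;

    // Copy at most min(tensor bytes, golden IFM size) so neither buffer is overrun.
    void FillInputTensor(int8_t* tensorData, std::size_t tensorBytes, const int8_t* vec)
    {
        const std::size_t copySz = std::min(tensorBytes, IFM_0_DATA_SIZE);
        std::memcpy(tensorData, vec, copySz);
    }
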
@@ -63,7 +63,7 @@ void TestInference(const T* input_goldenFV, const T* output_goldenFV, arm::app::
TfLiteTensor* outputTensor = model.GetOutputTensor(0);
REQUIRE(outputTensor);
- REQUIRE(outputTensor->bytes == OFM_DATA_SIZE);
+ REQUIRE(outputTensor->bytes == OFM_0_DATA_SIZE);
auto tensorData = tflite::GetTensorData<T>(outputTensor);
REQUIRE(tensorData);
@@ -83,7 +83,8 @@ TEST_CASE("Running random inference with Tflu and DsCnnModel Int8", "[DS_CNN]")
}
TEST_CASE("Running inference with Tflu and DsCnnModel Uint8", "[DS_CNN]") {
- for (uint32_t i = 0; i < NUMBER_OF_FM_FILES; ++i) {
+ REQUIRE(NUMBER_OF_IFM_FILES == NUMBER_OF_OFM_FILES);
+ for (uint32_t i = 0; i < NUMBER_OF_IFM_FILES; ++i) {
const int8_t* input_goldenFV = get_ifm_data_array(i);
const int8_t* output_goldenFV = get_ofm_data_array(i);
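
The new REQUIRE above exists because the loop indexes the input and output golden arrays with the same counter, which is only safe when both sets contain the same number of files. A self-contained sketch of that pattern follows, with placeholder sizes and accessors standing in for the generated test-data headers; the runtime REQUIRE is shown as a static_assert because the counts are compile-time constants in the sketch:

    #include <cstdint>

    // Hypothetical stand-ins for the generated file counts and golden-data accessors;
    // the real values come from the auto-generated test-data sources.
    constexpr uint32_t NUMBER_OF_IFM_FILES = 1;
    constexpr uint32_t NUMBER_OF_OFM_FILES = 1;
    static const int8_t ifm0[16] = {};   /* placeholder sizes */
    static const int8_t ofm0[16] = {};
    static const int8_t* get_ifm_data_array(uint32_t) { return ifm0; }
    static const int8_t* get_ofm_data_array(uint32_t) { return ofm0; }

    void RunAllGoldenVectors()
    {
        // Paired indexing is only valid when both sets are the same size.
        static_assert(NUMBER_OF_IFM_FILES == NUMBER_OF_OFM_FILES,
                      "IFM/OFM golden file counts must match");
        for (uint32_t i = 0; i < NUMBER_OF_IFM_FILES; ++i) {
            const int8_t* input_goldenFV  = get_ifm_data_array(i);
            const int8_t* output_goldenFV = get_ofm_data_array(i);
            (void)input_goldenFV;
            (void)output_goldenFV;   // a real test would call TestInference(...) here
        }
    }
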
diff --git a/tests/use_case/kws_asr/InferenceTestWav2Letter.cc b/tests/use_case/kws_asr/InferenceTestWav2Letter.cc
index 1b14a42..477a1dd 100644
--- a/tests/use_case/kws_asr/InferenceTestWav2Letter.cc
+++ b/tests/use_case/kws_asr/InferenceTestWav2Letter.cc
@@ -78,7 +78,7 @@ void TestInference(const T* input_goldenFV, const T* output_goldenFV, arm::app::
TfLiteTensor* outputTensor = model.GetOutputTensor(0);
REQUIRE(outputTensor);
- REQUIRE(outputTensor->bytes == OFM_DATA_SIZE);
+ REQUIRE(outputTensor->bytes == OFM_0_DATA_SIZE);
auto tensorData = tflite::GetTensorData<T>(outputTensor);
REQUIRE(tensorData);
@@ -89,7 +89,8 @@ void TestInference(const T* input_goldenFV, const T* output_goldenFV, arm::app::
TEST_CASE("Running inference with Tflu and Wav2LetterModel Int8", "[Wav2Letter]")
{
- for (uint32_t i = 0 ; i < NUMBER_OF_FM_FILES; ++i) {
+ REQUIRE(NUMBER_OF_IFM_FILES == NUMBER_OF_OFM_FILES);
+ for (uint32_t i = 0 ; i < NUMBER_OF_IFM_FILES; ++i) {
auto input_goldenFV = get_ifm_data_array(i);;
auto output_goldenFV = get_ofm_data_array(i);
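
Both files also tighten the output check from OFM_DATA_SIZE to OFM_0_DATA_SIZE before comparing against the golden output. A condensed sketch of that check (the real tests compare element by element via REQUIRE; a memcmp stands in here for brevity), with a hypothetical golden buffer in place of the generated data:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Hypothetical stand-ins for the generated OFM_0_DATA_SIZE constant and golden output.
    constexpr std::size_t OFM_0_DATA_SIZE = 12;
    static const int8_t goldenOfm[OFM_0_DATA_SIZE] = {};

    // True when the output tensor is exactly the golden size and matches it byte for byte.
    bool OutputMatchesGolden(const int8_t* tensorData, std::size_t tensorBytes)
    {
        if (tensorBytes != OFM_0_DATA_SIZE) {
            return false;
        }
        return std::memcmp(tensorData, goldenOfm, OFM_0_DATA_SIZE) == 0;
    }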