summaryrefslogtreecommitdiff
path: root/tests
diff options
context:
space:
mode:
authorKshitij Sisodia <kshitij.sisodia@arm.com>2022-05-06 09:13:03 +0100
committerKshitij Sisodia <kshitij.sisodia@arm.com>2022-05-06 17:11:41 +0100
commitaa4bcb14d0cbee910331545dd2fc086b58c37170 (patch)
treee67a43a43f61c6f8b6aad19018b0827baf7e31a6 /tests
parentfcca863bafd5f33522bc14c23dde4540e264ec94 (diff)
downloadml-embedded-evaluation-kit-aa4bcb14d0cbee910331545dd2fc086b58c37170.tar.gz
MLECO-3183: Refactoring application sources
Platform agnostic application sources are moved into application api module with their own independent CMake projects. Changes for MLECO-3080 also included - they create CMake projects for individual APIs (again, platform agnostic) that depend on the common logic. The "joint" KWS_ASR API has been removed and now the use case relies on the individual KWS and ASR API libraries. Change-Id: I1f7748dc767abb3904634a04e0991b74ac7b756d Signed-off-by: Kshitij Sisodia <kshitij.sisodia@arm.com>
Diffstat (limited to 'tests')
-rw-r--r--tests/use_case/ad/InferenceTestAD.cc22
-rw-r--r--tests/use_case/asr/InferenceTestWav2Letter.cc23
-rw-r--r--tests/use_case/asr/Wav2LetterPostprocessingTest.cc27
-rw-r--r--tests/use_case/img_class/ImgClassificationUCTest.cc31
-rw-r--r--tests/use_case/img_class/InferenceTestMobilenetV2.cc20
-rw-r--r--tests/use_case/kws/InferenceTestMicroNetKws.cc22
-rw-r--r--tests/use_case/kws/KWSHandlerTest.cc40
-rw-r--r--tests/use_case/kws_asr/InferenceTestMicroNetKws.cc23
-rw-r--r--tests/use_case/kws_asr/InferenceTestWav2Letter.cc22
-rw-r--r--tests/use_case/kws_asr/InitModels.cc29
-rw-r--r--tests/use_case/kws_asr/Wav2LetterPostprocessingTest.cc31
-rw-r--r--tests/use_case/noise_reduction/InferenceTestRNNoise.cc20
-rw-r--r--tests/use_case/noise_reduction/RNNNoiseUCTests.cc24
-rw-r--r--tests/use_case/noise_reduction/RNNoiseModelTests.cc27
-rw-r--r--tests/use_case/object_detection/InferenceTestYoloFastest.cc19
-rw-r--r--tests/use_case/object_detection/ObjectDetectionUCTest.cc30
-rw-r--r--tests/use_case/vww/VisualWakeWordUCTests.cc31
17 files changed, 383 insertions, 58 deletions
diff --git a/tests/use_case/ad/InferenceTestAD.cc b/tests/use_case/ad/InferenceTestAD.cc
index 6a1813f..d837617 100644
--- a/tests/use_case/ad/InferenceTestAD.cc
+++ b/tests/use_case/ad/InferenceTestAD.cc
@@ -22,11 +22,21 @@
#include "TestData_ad.hpp"
#include "log_macros.h"
#include "TensorFlowLiteMicro.hpp"
+#include "BufAttributes.hpp"
#ifndef AD_FEATURE_VEC_DATA_SIZE
#define AD_IN_FEATURE_VEC_DATA_SIZE (1024)
#endif /* AD_FEATURE_VEC_DATA_SIZE */
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ } /* namespace app */
+} /* namespace arm */
+
+extern uint8_t* GetModelPointer();
+extern size_t GetModelLen();
+
using namespace test;
bool RunInference(arm::app::Model& model, const int8_t vec[])
@@ -84,7 +94,10 @@ TEST_CASE("Running random inference with TensorFlow Lite Micro and AdModel Int8"
arm::app::AdModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
REQUIRE(model.IsInited());
REQUIRE(RunInferenceRandom(model));
@@ -102,11 +115,14 @@ TEST_CASE("Running golden vector inference with TensorFlow Lite Micro and AdMode
arm::app::AdModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
REQUIRE(model.IsInited());
TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
}
}
-} \ No newline at end of file
+}
diff --git a/tests/use_case/asr/InferenceTestWav2Letter.cc b/tests/use_case/asr/InferenceTestWav2Letter.cc
index 53c92ab..643f805 100644
--- a/tests/use_case/asr/InferenceTestWav2Letter.cc
+++ b/tests/use_case/asr/InferenceTestWav2Letter.cc
@@ -17,10 +17,21 @@
#include "TensorFlowLiteMicro.hpp"
#include "Wav2LetterModel.hpp"
#include "TestData_asr.hpp"
+#include "BufAttributes.hpp"
#include <catch.hpp>
#include <random>
+namespace arm {
+namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ namespace asr {
+ extern uint8_t* GetModelPointer();
+ extern size_t GetModelLen();
+ } /* namespace asr */
+} /* namespace app */
+} /* namespace arm */
+
using namespace test;
bool RunInference(arm::app::Model& model, const int8_t vec[], const size_t copySz)
@@ -58,7 +69,10 @@ TEST_CASE("Running random inference with TensorFlow Lite Micro and Wav2LetterMod
arm::app::Wav2LetterModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen()));
REQUIRE(model.IsInited());
REQUIRE(RunInferenceRandom(model));
@@ -96,11 +110,14 @@ TEST_CASE("Running inference with Tflu and Wav2LetterModel Int8", "[Wav2Letter]"
arm::app::Wav2LetterModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen()));
REQUIRE(model.IsInited());
TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
}
}
-} \ No newline at end of file
+}
diff --git a/tests/use_case/asr/Wav2LetterPostprocessingTest.cc b/tests/use_case/asr/Wav2LetterPostprocessingTest.cc
index 11c4919..9c3d658 100644
--- a/tests/use_case/asr/Wav2LetterPostprocessingTest.cc
+++ b/tests/use_case/asr/Wav2LetterPostprocessingTest.cc
@@ -17,11 +17,23 @@
#include "Wav2LetterPostprocess.hpp"
#include "Wav2LetterModel.hpp"
#include "ClassificationResult.hpp"
+#include "BufAttributes.hpp"
#include <algorithm>
#include <catch.hpp>
#include <limits>
+namespace arm {
+namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+
+ namespace asr {
+ extern uint8_t* GetModelPointer();
+ extern size_t GetModelLen();
+ } /* namespace asr */
+} /* namespace app */
+} /* namespace arm */
+
template <typename T>
static TfLiteTensor GetTestTensor(
std::vector<int>& shape,
@@ -51,7 +63,10 @@ TEST_CASE("Checking return value")
const uint32_t outputCtxLen = 5;
arm::app::AsrClassifier classifier;
arm::app::Wav2LetterModel model;
- model.Init();
+ model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen());
std::vector<std::string> dummyLabels = {"a", "b", "$"};
const uint32_t blankTokenIdx = 2;
std::vector<arm::app::ClassificationResult> dummyResult;
@@ -71,7 +86,10 @@ TEST_CASE("Checking return value")
const uint32_t outputCtxLen = 5;
arm::app::AsrClassifier classifier;
arm::app::Wav2LetterModel model;
- model.Init();
+ model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen());
std::vector<std::string> dummyLabels = {"a", "b", "$"};
const uint32_t blankTokenIdx = 2;
std::vector<arm::app::ClassificationResult> dummyResult;
@@ -102,7 +120,10 @@ TEST_CASE("Postprocessing - erasing required elements")
std::vector<int> tensorShape = {1, 1, nRows, nCols};
arm::app::AsrClassifier classifier;
arm::app::Wav2LetterModel model;
- model.Init();
+ model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen());
std::vector<std::string> dummyLabels = {"a", "b", "$"};
std::vector<arm::app::ClassificationResult> dummyResult;
diff --git a/tests/use_case/img_class/ImgClassificationUCTest.cc b/tests/use_case/img_class/ImgClassificationUCTest.cc
index b9caf61..d8339b6 100644
--- a/tests/use_case/img_class/ImgClassificationUCTest.cc
+++ b/tests/use_case/img_class/ImgClassificationUCTest.cc
@@ -24,13 +24,25 @@
#include <catch.hpp>
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ } /* namespace app */
+} /* namespace arm */
+
+extern uint8_t* GetModelPointer();
+extern size_t GetModelLen();
+
TEST_CASE("Model info")
{
/* Model wrapper object. */
arm::app::MobileNetModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
@@ -50,7 +62,10 @@ TEST_CASE("Inference by index", "[.]")
arm::app::MobileNetModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
@@ -83,7 +98,10 @@ TEST_CASE("Inference run all images", "[.]")
arm::app::MobileNetModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
@@ -112,11 +130,14 @@ TEST_CASE("List all images")
arm::app::MobileNetModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
caseContext.Set<arm::app::Model&>("model", model);
REQUIRE(arm::app::ListFilesHandler(caseContext));
-} \ No newline at end of file
+}
diff --git a/tests/use_case/img_class/InferenceTestMobilenetV2.cc b/tests/use_case/img_class/InferenceTestMobilenetV2.cc
index 7e7508b..30ce19f 100644
--- a/tests/use_case/img_class/InferenceTestMobilenetV2.cc
+++ b/tests/use_case/img_class/InferenceTestMobilenetV2.cc
@@ -18,9 +18,19 @@
#include "MobileNetModel.hpp"
#include "TensorFlowLiteMicro.hpp"
#include "TestData_img_class.hpp"
+#include "BufAttributes.hpp"
#include <catch.hpp>
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ } /* namespace app */
+} /* namespace arm */
+
+extern uint8_t* GetModelPointer();
+extern size_t GetModelLen();
+
using namespace test;
bool RunInference(arm::app::Model& model, const int8_t imageData[])
@@ -67,7 +77,10 @@ TEST_CASE("Running inference with TensorFlow Lite Micro and MobileNeV2 Uint8", "
arm::app::MobileNetModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
REQUIRE(model.IsInited());
for (uint32_t i = 0 ; i < NUMBER_OF_IFM_FILES; ++i) {
@@ -81,7 +94,10 @@ TEST_CASE("Running inference with TensorFlow Lite Micro and MobileNeV2 Uint8", "
arm::app::MobileNetModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
REQUIRE(model.IsInited());
TestInference<uint8_t>(i, model, 1);
diff --git a/tests/use_case/kws/InferenceTestMicroNetKws.cc b/tests/use_case/kws/InferenceTestMicroNetKws.cc
index 41ecc3c..a6f7a03 100644
--- a/tests/use_case/kws/InferenceTestMicroNetKws.cc
+++ b/tests/use_case/kws/InferenceTestMicroNetKws.cc
@@ -17,10 +17,22 @@
#include "MicroNetKwsModel.hpp"
#include "TestData_kws.hpp"
#include "TensorFlowLiteMicro.hpp"
+#include "BufAttributes.hpp"
#include <catch.hpp>
#include <random>
+namespace arm {
+namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+
+ namespace kws {
+ extern uint8_t *GetModelPointer();
+ extern size_t GetModelLen();
+ } /* namespace kws */
+} /* namespace app */
+} /* namespace arm */
+
using namespace test;
bool RunInference(arm::app::Model& model, const int8_t vec[])
@@ -78,7 +90,10 @@ TEST_CASE("Running random inference with TensorFlow Lite Micro and MicroNetKwsMo
arm::app::MicroNetKwsModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::kws::GetModelPointer(),
+ arm::app::kws::GetModelLen()));
REQUIRE(model.IsInited());
REQUIRE(RunInferenceRandom(model));
@@ -96,7 +111,10 @@ TEST_CASE("Running inference with TensorFlow Lite Micro and MicroNetKwsModel int
arm::app::MicroNetKwsModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::kws::GetModelPointer(),
+ arm::app::kws::GetModelLen()));
REQUIRE(model.IsInited());
TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
diff --git a/tests/use_case/kws/KWSHandlerTest.cc b/tests/use_case/kws/KWSHandlerTest.cc
index c24faa4..d9d00a8 100644
--- a/tests/use_case/kws/KWSHandlerTest.cc
+++ b/tests/use_case/kws/KWSHandlerTest.cc
@@ -24,13 +24,26 @@
#include "Classifier.hpp"
#include "UseCaseCommonUtils.hpp"
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ namespace kws {
+ extern uint8_t* GetModelPointer();
+ extern size_t GetModelLen();
+ }
+ } /* namespace app */
+} /* namespace arm */
+
TEST_CASE("Model info")
{
/* Model wrapper object. */
arm::app::MicroNetKwsModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::kws::GetModelPointer(),
+ arm::app::kws::GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
@@ -50,7 +63,10 @@ TEST_CASE("Inference by index")
arm::app::MicroNetKwsModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::kws::GetModelPointer(),
+ arm::app::kws::GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
@@ -58,8 +74,8 @@ TEST_CASE("Inference by index")
arm::app::Profiler profiler{"kws"};
caseContext.Set<arm::app::Profiler&>("profiler", profiler);
caseContext.Set<arm::app::Model&>("model", model);
- caseContext.Set<int>("frameLength", g_FrameLength); /* 640 sample length for MicroNetKws. */
- caseContext.Set<int>("frameStride", g_FrameStride); /* 320 sample stride for MicroNetKws. */
+ caseContext.Set<int>("frameLength", arm::app::kws::g_FrameLength); /* 640 sample length for MicroNetKws. */
+ caseContext.Set<int>("frameStride", arm::app::kws::g_FrameStride); /* 320 sample stride for MicroNetKws. */
caseContext.Set<float>("scoreThreshold", 0.5); /* Normalised score threshold. */
arm::app::Classifier classifier; /* classifier wrapper object. */
@@ -122,7 +138,10 @@ TEST_CASE("Inference run all clips")
arm::app::MicroNetKwsModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::kws::GetModelPointer(),
+ arm::app::kws::GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
@@ -131,8 +150,8 @@ TEST_CASE("Inference run all clips")
caseContext.Set<arm::app::Profiler&>("profiler", profiler);
caseContext.Set<arm::app::Model&>("model", model);
caseContext.Set<uint32_t>("clipIndex", 0);
- caseContext.Set<int>("frameLength", g_FrameLength); /* 640 sample length for MicroNet. */
- caseContext.Set<int>("frameStride", g_FrameStride); /* 320 sample stride for MicroNet. */
+ caseContext.Set<int>("frameLength", arm::app::kws::g_FrameLength); /* 640 sample length for MicroNet. */
+ caseContext.Set<int>("frameStride", arm::app::kws::g_FrameStride); /* 320 sample stride for MicroNet. */
caseContext.Set<float>("scoreThreshold", 0.7); /* Normalised score threshold. */
arm::app::Classifier classifier; /* classifier wrapper object. */
caseContext.Set<arm::app::Classifier&>("classifier", classifier);
@@ -153,7 +172,10 @@ TEST_CASE("List all audio clips")
arm::app::MicroNetKwsModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::kws::GetModelPointer(),
+ arm::app::kws::GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
@@ -161,4 +183,4 @@ TEST_CASE("List all audio clips")
caseContext.Set<arm::app::Model&>("model", model);
REQUIRE(arm::app::ListFilesHandler(caseContext));
-} \ No newline at end of file
+}
diff --git a/tests/use_case/kws_asr/InferenceTestMicroNetKws.cc b/tests/use_case/kws_asr/InferenceTestMicroNetKws.cc
index a493021..4ba4693 100644
--- a/tests/use_case/kws_asr/InferenceTestMicroNetKws.cc
+++ b/tests/use_case/kws_asr/InferenceTestMicroNetKws.cc
@@ -17,10 +17,21 @@
#include "MicroNetKwsModel.hpp"
#include "TestData_kws.hpp"
#include "TensorFlowLiteMicro.hpp"
+#include "BufAttributes.hpp"
#include <catch.hpp>
#include <random>
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ namespace kws {
+ extern uint8_t* GetModelPointer();
+ extern size_t GetModelLen();
+ }
+ } /* namespace app */
+} /* namespace arm */
+
namespace test {
namespace kws {
@@ -75,7 +86,10 @@ TEST_CASE("Running random inference with Tflu and MicroNetKwsModel Int8", "[Micr
arm::app::MicroNetKwsModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::kws::GetModelPointer(),
+ arm::app::kws::GetModelLen()));
REQUIRE(model.IsInited());
REQUIRE(RunInferenceRandom(model));
@@ -91,7 +105,10 @@ TEST_CASE("Running inference with Tflu and MicroNetKwsModel Int8", "[MicroNetKws
arm::app::MicroNetKwsModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::kws::GetModelPointer(),
+ arm::app::kws::GetModelLen()));
REQUIRE(model.IsInited());
TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
@@ -101,4 +118,4 @@ TEST_CASE("Running inference with Tflu and MicroNetKwsModel Int8", "[MicroNetKws
}
} //namespace
-} //namespace \ No newline at end of file
+} //namespace
diff --git a/tests/use_case/kws_asr/InferenceTestWav2Letter.cc b/tests/use_case/kws_asr/InferenceTestWav2Letter.cc
index 1c5f20a..5d30211 100644
--- a/tests/use_case/kws_asr/InferenceTestWav2Letter.cc
+++ b/tests/use_case/kws_asr/InferenceTestWav2Letter.cc
@@ -17,10 +17,22 @@
#include "TensorFlowLiteMicro.hpp"
#include "Wav2LetterModel.hpp"
#include "TestData_asr.hpp"
+#include "BufAttributes.hpp"
#include <catch.hpp>
#include <random>
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+
+ namespace asr {
+ extern uint8_t* GetModelPointer();
+ extern size_t GetModelLen();
+ }
+ } /* namespace app */
+} /* namespace arm */
+
namespace test {
namespace asr {
@@ -59,7 +71,10 @@ TEST_CASE("Running random inference with Tflu and Wav2LetterModel Int8", "[Wav2L
arm::app::Wav2LetterModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen()));
REQUIRE(model.IsInited());
REQUIRE(RunInferenceRandom(model));
@@ -98,7 +113,10 @@ TEST_CASE("Running inference with Tflu and Wav2LetterModel Int8", "[Wav2Letter]"
arm::app::Wav2LetterModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen()));
REQUIRE(model.IsInited());
TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
diff --git a/tests/use_case/kws_asr/InitModels.cc b/tests/use_case/kws_asr/InitModels.cc
index 97aa092..85841a3 100644
--- a/tests/use_case/kws_asr/InitModels.cc
+++ b/tests/use_case/kws_asr/InitModels.cc
@@ -16,9 +16,25 @@
*/
#include "MicroNetKwsModel.hpp"
#include "Wav2LetterModel.hpp"
+#include "BufAttributes.hpp"
#include <catch.hpp>
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+
+ namespace asr {
+ extern uint8_t* GetModelPointer();
+ extern size_t GetModelLen();
+ }
+ namespace kws {
+ extern uint8_t* GetModelPointer();
+ extern size_t GetModelLen();
+ }
+ } /* namespace app */
+} /* namespace arm */
+
/* Skip this test, Wav2LetterModel if not Vela optimized but only from ML-zoo will fail. */
TEST_CASE("Init two Models", "[.]")
{
@@ -35,13 +51,20 @@ TEST_CASE("Init two Models", "[.]")
//arm::app::Wav2LetterModel model2; /* model2. */
/* Load/initialise the first model. */
- REQUIRE(model1.Init());
+ REQUIRE(model1.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::kws::GetModelPointer(),
+ arm::app::kws::GetModelLen()));
/* Allocator instance should have been created. */
REQUIRE(nullptr != model1.GetAllocator());
/* Load the second model using the same allocator as model 1. */
- REQUIRE(model2.Init(model1.GetAllocator()));
+ REQUIRE(model2.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen(),
+ model1.GetAllocator()));
/* Make sure they point to the same allocator object. */
REQUIRE(model1.GetAllocator() == model2.GetAllocator());
@@ -49,4 +72,4 @@ TEST_CASE("Init two Models", "[.]")
/* Both models should report being initialised. */
REQUIRE(true == model1.IsInited());
REQUIRE(true == model2.IsInited());
-} \ No newline at end of file
+}
diff --git a/tests/use_case/kws_asr/Wav2LetterPostprocessingTest.cc b/tests/use_case/kws_asr/Wav2LetterPostprocessingTest.cc
index e343b66..d2071ea 100644
--- a/tests/use_case/kws_asr/Wav2LetterPostprocessingTest.cc
+++ b/tests/use_case/kws_asr/Wav2LetterPostprocessingTest.cc
@@ -17,11 +17,27 @@
#include "Wav2LetterPostprocess.hpp"
#include "Wav2LetterModel.hpp"
#include "ClassificationResult.hpp"
+#include "BufAttributes.hpp"
#include <algorithm>
#include <catch.hpp>
#include <limits>
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+
+ namespace asr {
+ extern uint8_t* GetModelPointer();
+ extern size_t GetModelLen();
+ }
+ namespace kws {
+ extern uint8_t* GetModelPointer();
+ extern size_t GetModelLen();
+ }
+ } /* namespace app */
+} /* namespace arm */
+
template <typename T>
static TfLiteTensor GetTestTensor(
std::vector<int>& shape,
@@ -51,7 +67,10 @@ TEST_CASE("Checking return value")
const uint32_t outputCtxLen = 5;
arm::app::AsrClassifier classifier;
arm::app::Wav2LetterModel model;
- model.Init();
+ model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen());
std::vector<std::string> dummyLabels = {"a", "b", "$"};
const uint32_t blankTokenIdx = 2;
std::vector<arm::app::ClassificationResult> dummyResult;
@@ -71,7 +90,10 @@ TEST_CASE("Checking return value")
const uint32_t outputCtxLen = 5;
arm::app::AsrClassifier classifier;
arm::app::Wav2LetterModel model;
- model.Init();
+ model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen());
std::vector<std::string> dummyLabels = {"a", "b", "$"};
const uint32_t blankTokenIdx = 2;
std::vector<arm::app::ClassificationResult> dummyResult;
@@ -102,7 +124,10 @@ TEST_CASE("Postprocessing - erasing required elements")
std::vector<int> tensorShape = {1, 1, nRows, nCols};
arm::app::AsrClassifier classifier;
arm::app::Wav2LetterModel model;
- model.Init();
+ model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ arm::app::asr::GetModelPointer(),
+ arm::app::asr::GetModelLen());
std::vector<std::string> dummyLabels = {"a", "b", "$"};
std::vector<arm::app::ClassificationResult> dummyResult;
diff --git a/tests/use_case/noise_reduction/InferenceTestRNNoise.cc b/tests/use_case/noise_reduction/InferenceTestRNNoise.cc
index 4c9786f..9dc640b 100644
--- a/tests/use_case/noise_reduction/InferenceTestRNNoise.cc
+++ b/tests/use_case/noise_reduction/InferenceTestRNNoise.cc
@@ -17,10 +17,20 @@
#include "TensorFlowLiteMicro.hpp"
#include "RNNoiseModel.hpp"
#include "TestData_noise_reduction.hpp"
+#include "BufAttributes.hpp"
#include <catch.hpp>
#include <random>
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ } /* namespace app */
+} /* namespace arm */
+
+extern uint8_t* GetModelPointer();
+extern size_t GetModelLen();
+
namespace test {
namespace rnnoise {
@@ -62,7 +72,10 @@ namespace rnnoise {
arm::app::RNNoiseModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
REQUIRE(model.IsInited());
REQUIRE(RunInferenceRandom(model));
@@ -121,7 +134,10 @@ namespace rnnoise {
arm::app::RNNoiseModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
REQUIRE(model.IsInited());
TestInference<int8_t>(goldenInputFV, goldenOutputFV, model);
diff --git a/tests/use_case/noise_reduction/RNNNoiseUCTests.cc b/tests/use_case/noise_reduction/RNNNoiseUCTests.cc
index cc1b4d7..bebfdfd 100644
--- a/tests/use_case/noise_reduction/RNNNoiseUCTests.cc
+++ b/tests/use_case/noise_reduction/RNNNoiseUCTests.cc
@@ -24,6 +24,15 @@
#include <hal.h>
#include <Profiler.hpp>
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ } /* namespace app */
+} /* namespace arm */
+
+extern uint8_t* GetModelPointer();
+extern size_t GetModelLen();
+
#define PLATFORM hal_platform_init();
#define CONTEXT \
@@ -38,7 +47,10 @@ TEST_CASE("Verify output tensor memory dump")
std::vector<uint8_t> memPool(maxMemDumpSz); /* Memory pool */
arm::app::RNNoiseModel model{};
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
REQUIRE(model.IsInited());
/* Populate the output tensors */
@@ -105,7 +117,10 @@ TEST_CASE("Inference run all clips", "[RNNoise]")
caseContext.Set<uint32_t>("frameStride", g_FrameStride);
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
REQUIRE(arm::app::NoiseReductionHandler(caseContext, true));
}
@@ -136,7 +151,10 @@ void testInfByIndex(std::vector<uint32_t>& numberOfInferences) {
caseContext.Set<uint32_t>("frameStride", g_FrameStride);
caseContext.Set<uint32_t>("numInputFeatures", g_NumInputFeatures);
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
size_t oneInferenceOutSizeBytes = g_FrameLength * sizeof(int16_t);
diff --git a/tests/use_case/noise_reduction/RNNoiseModelTests.cc b/tests/use_case/noise_reduction/RNNoiseModelTests.cc
index 7798975..9720ba5 100644
--- a/tests/use_case/noise_reduction/RNNoiseModelTests.cc
+++ b/tests/use_case/noise_reduction/RNNoiseModelTests.cc
@@ -17,10 +17,20 @@
#include "RNNoiseModel.hpp"
#include "TensorFlowLiteMicro.hpp"
#include "TestData_noise_reduction.hpp"
+#include "BufAttributes.hpp"
#include <catch.hpp>
#include <random>
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ } /* namespace app */
+} /* namespace arm */
+
+extern uint8_t* GetModelPointer();
+extern size_t GetModelLen();
+
bool RunInference(arm::app::Model& model, std::vector<int8_t> vec,
const size_t sizeRequired, const size_t dataInputIndex)
{
@@ -61,7 +71,10 @@ TEST_CASE("Running random inference with TensorFlow Lite Micro and RNNoiseModel
arm::app::RNNoiseModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
REQUIRE(model.IsInited());
model.ResetGruState();
@@ -114,7 +127,10 @@ void printArray(size_t dataSz, T data){
TEST_CASE("Test initial GRU out state is 0", "[RNNoise]")
{
TestRNNoiseModel model{};
- model.Init();
+ model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen());
auto map = model.GetStateMap();
@@ -135,7 +151,10 @@ TEST_CASE("Test initial GRU out state is 0", "[RNNoise]")
TEST_CASE("Test GRU state copy", "[RNNoise]")
{
TestRNNoiseModel model{};
- model.Init();
+ model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen());
REQUIRE(RunInferenceRandom(model, 0));
auto map = model.GetStateMap();
@@ -162,4 +181,4 @@ TEST_CASE("Test GRU state copy", "[RNNoise]")
statesIter++;
}
-} \ No newline at end of file
+}
diff --git a/tests/use_case/object_detection/InferenceTestYoloFastest.cc b/tests/use_case/object_detection/InferenceTestYoloFastest.cc
index 2c035e7..1b4d1dd 100644
--- a/tests/use_case/object_detection/InferenceTestYoloFastest.cc
+++ b/tests/use_case/object_detection/InferenceTestYoloFastest.cc
@@ -22,6 +22,15 @@
#include "InputFiles.hpp"
#include "UseCaseCommonUtils.hpp"
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ } /* namespace app */
+} /* namespace arm */
+
+extern uint8_t* GetModelPointer();
+extern size_t GetModelLen();
+
#include <catch.hpp>
void GetExpectedResults(std::vector<std::vector<arm::app::object_detection::DetectionResult>> &expected_results)
@@ -122,7 +131,10 @@ TEST_CASE("Running inference with TensorFlow Lite Micro and YoloFastest", "[Yolo
arm::app::YoloFastestModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
REQUIRE(model.IsInited());
for (uint32_t i = 0 ; i < NUMBER_OF_FILES; ++i) {
@@ -136,7 +148,10 @@ TEST_CASE("Running inference with TensorFlow Lite Micro and YoloFastest", "[Yolo
arm::app::YoloFastestModel model{};
REQUIRE_FALSE(model.IsInited());
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
REQUIRE(model.IsInited());
TestInferenceDetectionResults<uint8_t>(i, model, 1);
diff --git a/tests/use_case/object_detection/ObjectDetectionUCTest.cc b/tests/use_case/object_detection/ObjectDetectionUCTest.cc
index 023b893..ffb4976 100644
--- a/tests/use_case/object_detection/ObjectDetectionUCTest.cc
+++ b/tests/use_case/object_detection/ObjectDetectionUCTest.cc
@@ -20,16 +20,29 @@
#include "YoloFastestModel.hpp"
#include "UseCaseHandler.hpp"
#include "UseCaseCommonUtils.hpp"
+#include "BufAttributes.hpp"
#include <catch.hpp>
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ } /* namespace app */
+} /* namespace arm */
+
+extern uint8_t* GetModelPointer();
+extern size_t GetModelLen();
+
TEST_CASE("Model info")
{
/* Model wrapper object. */
arm::app::YoloFastestModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
@@ -49,7 +62,10 @@ TEST_CASE("Inference by index")
arm::app::YoloFastestModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
@@ -72,7 +88,10 @@ TEST_CASE("Inference run all images")
arm::app::YoloFastestModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
@@ -95,7 +114,10 @@ TEST_CASE("List all images")
arm::app::YoloFastestModel model;
/* Load the model. */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context. */
arm::app::ApplicationContext caseContext;
diff --git a/tests/use_case/vww/VisualWakeWordUCTests.cc b/tests/use_case/vww/VisualWakeWordUCTests.cc
index 531764b..05a31a4 100644
--- a/tests/use_case/vww/VisualWakeWordUCTests.cc
+++ b/tests/use_case/vww/VisualWakeWordUCTests.cc
@@ -24,12 +24,24 @@
#include "Classifier.hpp"
#include "UseCaseCommonUtils.hpp"
+namespace arm {
+ namespace app {
+ static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
+ } /* namespace app */
+} /* namespace arm */
+
+extern uint8_t* GetModelPointer();
+extern size_t GetModelLen();
+
TEST_CASE("Model info")
{
arm::app::VisualWakeWordModel model; /* model wrapper object */
/* Load the model */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context */
arm::app::ApplicationContext caseContext;
@@ -46,7 +58,10 @@ TEST_CASE("Inference by index")
arm::app::VisualWakeWordModel model; /* model wrapper object */
/* Load the model */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context */
arm::app::ApplicationContext caseContext;
@@ -76,7 +91,10 @@ TEST_CASE("Inference run all images")
arm::app::VisualWakeWordModel model; /* model wrapper object */
/* Load the model */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context */
arm::app::ApplicationContext caseContext;
@@ -102,7 +120,10 @@ TEST_CASE("List all images")
arm::app::VisualWakeWordModel model; /* model wrapper object */
/* Load the model */
- REQUIRE(model.Init());
+ REQUIRE(model.Init(arm::app::tensorArena,
+ sizeof(arm::app::tensorArena),
+ GetModelPointer(),
+ GetModelLen()));
/* Instantiate application context */
arm::app::ApplicationContext caseContext;
@@ -110,4 +131,4 @@ TEST_CASE("List all images")
caseContext.Set<arm::app::Model&>("model", model);
REQUIRE(arm::app::ListFilesHandler(caseContext));
-} \ No newline at end of file
+}