summaryrefslogtreecommitdiff
path: root/tests/use_case/img_class
diff options
context:
space:
mode:
author: Richard Burton <richard.burton@arm.com> 2021-11-10 16:27:14 +0000
committer: Richard <richard.burton@arm.com> 2021-11-10 16:34:16 +0000
commit: 005534664e192cf909a11435c4bc4696b1f4c51f (patch)
tree: f8314bd284561e1f0ff68fc393ee22d0318ae162 /tests/use_case/img_class
parent: dee53bc7769d6201ec27deea4405c0df6c9b0623 (diff)
download: ml-embedded-evaluation-kit-005534664e192cf909a11435c4bc4696b1f4c51f.tar.gz
MLECO-2354 MLECO-2355 MLECO-2356: Moving noise reduction to public repository
* Use RNNoise model from PMZ * Add Noise reduction use-case Signed-off-by: Richard burton <richard.burton@arm.com> Change-Id: Ia8cc7ef102e22a5ff8bfbd3833594a4905a66057
Diffstat (limited to 'tests/use_case/img_class')
-rw-r--r-- tests/use_case/img_class/InferenceTestMobilenetV2.cc | 10
1 file changed, 5 insertions, 5 deletions
diff --git a/tests/use_case/img_class/InferenceTestMobilenetV2.cc b/tests/use_case/img_class/InferenceTestMobilenetV2.cc
index bb89c99..07bd78f 100644
--- a/tests/use_case/img_class/InferenceTestMobilenetV2.cc
+++ b/tests/use_case/img_class/InferenceTestMobilenetV2.cc
@@ -29,9 +29,9 @@ bool RunInference(arm::app::Model& model, const int8_t imageData[])
TfLiteTensor* inputTensor = model.GetInputTensor(0);
REQUIRE(inputTensor);
- const size_t copySz = inputTensor->bytes < IFM_DATA_SIZE ?
+ const size_t copySz = inputTensor->bytes < IFM_0_DATA_SIZE ?
inputTensor->bytes :
- IFM_DATA_SIZE;
+ IFM_0_DATA_SIZE;
memcpy(inputTensor->data.data, imageData, copySz);
if(model.IsDataSigned()){
@@ -51,7 +51,7 @@ void TestInference(int imageIdx, arm::app::Model& model, T tolerance) {
TfLiteTensor* outputTensor = model.GetOutputTensor(0);
REQUIRE(outputTensor);
- REQUIRE(outputTensor->bytes == OFM_DATA_SIZE);
+ REQUIRE(outputTensor->bytes == OFM_0_DATA_SIZE);
auto tensorData = tflite::GetTensorData<T>(outputTensor);
REQUIRE(tensorData);
@@ -71,12 +71,12 @@ TEST_CASE("Running inference with TensorFlow Lite Micro and MobileNeV2 Uint8", "
REQUIRE(model.Init());
REQUIRE(model.IsInited());
- for (uint32_t i = 0 ; i < NUMBER_OF_FM_FILES; ++i) {
+ for (uint32_t i = 0 ; i < NUMBER_OF_IFM_FILES; ++i) {
TestInference<uint8_t>(i, model, 1);
}
}
- for (uint32_t i = 0 ; i < NUMBER_OF_FM_FILES; ++i) {
+ for (uint32_t i = 0 ; i < NUMBER_OF_IFM_FILES; ++i) {
DYNAMIC_SECTION("Executing inference with re-init")
{
arm::app::MobileNetModel model{};