/*
 * Copyright (c) 2021 Arm Limited. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "DsCnnModel.hpp"
#include "hal.h"
#include "TestData_kws.hpp"
#include "TensorFlowLiteMicro.hpp"

#include <catch.hpp>
#include <algorithm>
#include <cstring>
#include <random>
#include <vector>

bool RunInference(arm::app::Model& model, const int8_t vec[])
{
    TfLiteTensor* inputTensor = model.GetInputTensor(0);
    REQUIRE(inputTensor);

    /* Copy no more data than the input tensor can hold. */
    const size_t copySz = inputTensor->bytes < IFM_DATA_SIZE ?
                            inputTensor->bytes : IFM_DATA_SIZE;
    memcpy(inputTensor->data.data, vec, copySz);

    return model.RunInference();
}

bool RunInferenceRandom(arm::app::Model& model)
{
    TfLiteTensor* inputTensor = model.GetInputTensor(0);
    REQUIRE(inputTensor);

    std::random_device rndDevice;
    std::mt19937 mersenneGen{rndDevice()};
    std::uniform_int_distribution<short> dist {-128, 127};

    auto gen = [&dist, &mersenneGen]() {
        return dist(mersenneGen);
    };

    /* Fill the input with random int8 values and run a single inference. */
    std::vector<int8_t> randomAudio(inputTensor->bytes);
    std::generate(std::begin(randomAudio), std::end(randomAudio), gen);

    REQUIRE(RunInference(model, randomAudio.data()));
    return true;
}

template<typename T>
void TestInference(const T* input_goldenFV, const T* output_goldenFV, arm::app::Model& model)
{
    REQUIRE(RunInference(model, input_goldenFV));

    TfLiteTensor* outputTensor = model.GetOutputTensor(0);

    REQUIRE(outputTensor);
    REQUIRE(outputTensor->bytes == OFM_DATA_SIZE);
    auto tensorData = tflite::GetTensorData<T>(outputTensor);
    REQUIRE(tensorData);

    /* Compare each output element against the golden reference vector. */
    for (size_t i = 0; i < outputTensor->bytes; i++) {
        REQUIRE((int)tensorData[i] == (int)output_goldenFV[i]);
    }
}

TEST_CASE("Running random inference with TensorFlow Lite Micro and DsCnnModel Int8", "[DS_CNN]")
{
    arm::app::DsCnnModel model{};

    REQUIRE_FALSE(model.IsInited());
    REQUIRE(model.Init());
    REQUIRE(model.IsInited());

    REQUIRE(RunInferenceRandom(model));
}

TEST_CASE("Running inference with TensorFlow Lite Micro and DsCnnModel Int8", "[DS_CNN]")
{
    for (uint32_t i = 0; i < NUMBER_OF_FM_FILES; ++i) {
        const int8_t* input_goldenFV = get_ifm_data_array(i);
        const int8_t* output_goldenFV = get_ofm_data_array(i);

        DYNAMIC_SECTION("Executing inference with re-init")
        {
            arm::app::DsCnnModel model{};

            REQUIRE_FALSE(model.IsInited());
            REQUIRE(model.Init());
            REQUIRE(model.IsInited());

            TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
        }
    }
}