summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorIsabella Gottardi <isabella.gottardi@arm.com>2022-02-16 14:24:03 +0000
committerIsabella Gottardi <isabella.gottardi@arm.com>2022-02-25 17:29:30 +0000
commitef2b9ddd7771589e049c4103859ecef67fe87855 (patch)
tree9856299c572d32cd049816a04dbef7930dc3c7fb
parent177c69d40dddd4db9da7875b9979b82c67609cd1 (diff)
downloadml-embedded-evaluation-kit-ef2b9ddd7771589e049c4103859ecef67fe87855.tar.gz
MLECO-2881: Revise resources_downloaded based on optimised models' metadata
Change-Id: I12777c3818463c11d6351db0b4961a2bc0b00b18 Signed-off-by: Isabella Gottardi <isabella.gottardi@arm.com>
-rw-r--r--CMakeLists.txt3
-rw-r--r--scripts/cmake/util_functions.cmake27
-rw-r--r--scripts/py/check_update_resources_downloaded.py84
-rwxr-xr-xset_up_default_resources.py571
4 files changed, 514 insertions, 171 deletions
diff --git a/CMakeLists.txt b/CMakeLists.txt
index bbd7756..e693983 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -58,6 +58,9 @@ endif()
include(${CMAKE_SCRIPTS_DIR}/common_user_options.cmake)
+# Check if the resources_downloaded needs update
+check_update_public_resources(${RESOURCES_DIR})
+
add_platform_build_configuration(TARGET_PLATFORM ${TARGET_PLATFORM})
set_platform_global_defaults()
diff --git a/scripts/cmake/util_functions.cmake b/scripts/cmake/util_functions.cmake
index 447265c..ee9eceb 100644
--- a/scripts/cmake/util_functions.cmake
+++ b/scripts/cmake/util_functions.cmake
@@ -1,5 +1,5 @@
#----------------------------------------------------------------------------
-# Copyright (c) 2021 Arm Limited. All rights reserved.
+# Copyright (c) 2021-2022 Arm Limited. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -152,7 +152,7 @@ endfunction()
# Function to download a files from the Arm Model Zoo
# Arguments:
# model_zoo_version: hash of the Arm Model Zoo commit to use
-# file_sub_path: subpath within the model zoo respository
+# file_sub_path: subpath within the model zoo repository
# download_path: location where this file is to be downloaded (path including filename)
function(download_file_from_modelzoo model_zoo_version file_sub_path download_path)
@@ -190,4 +190,25 @@ function(add_platform_build_configuration)
message(STATUS "Found build configuration: ${PLATFORM_BUILD_CONFIG}")
include(${PLATFORM_BUILD_CONFIG}/build_configuration.cmake)
-endfunction() \ No newline at end of file
+endfunction()
+
+function(check_update_public_resources resource_downloaded_dir)
+
+ string(JOIN "/" FILE_URL ${resource_downloaded_dir})
+ execute_process(
+ COMMAND python3 ${CMAKE_SOURCE_DIR}/scripts/py/check_update_resources_downloaded.py
+ --resource_downloaded_dir ${resource_downloaded_dir}
+ --setup_script_path ${CMAKE_SOURCE_DIR}/set_up_default_resources.py
+ RESULT_VARIABLE return_code
+ )
+ if (NOT return_code EQUAL "0")
+ if (NOT return_code EQUAL "1")
+            # A return code of 2 (or any value other than 0 and 1) indicates an error in the resources_downloaded folder
+ message(FATAL_ERROR "Resources downloaded error, please run: set_up_default_resources.py")
+ else()
+ # Return code equal to 1 means that resources_downloaded need to be updated
+ message(FATAL_ERROR "Resources downloaded need to be updated, please run: set_up_default_resources.py --clean")
+ endif()
+ endif ()
+
+endfunction()
diff --git a/scripts/py/check_update_resources_downloaded.py b/scripts/py/check_update_resources_downloaded.py
new file mode 100644
index 0000000..6408f14
--- /dev/null
+++ b/scripts/py/check_update_resources_downloaded.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+# Copyright (c) 2022 Arm Limited. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import os
+import subprocess
+import sys
+from argparse import ArgumentParser
+
+
+def check_update_resources_downloaded(
+ resource_downloaded_dir: str, set_up_script_path: str
+):
+ """
+    Function that checks if the resources downloaded need to be refreshed.
+
+ Parameters:
+ ----------
+ resource_downloaded_dir (string): Specifies the path to resources_downloaded folder.
+ set_up_script_path (string): Specifies the path to set_up_default_resources.py file.
+ """
+
+ metadata_file_path = os.path.join(
+ resource_downloaded_dir, "resources_downloaded_metadata.json"
+ )
+
+ if os.path.isfile(metadata_file_path):
+ with open(metadata_file_path) as metadata_json:
+
+ metadata_dict = json.load(metadata_json)
+ set_up_script_hash = metadata_dict["set_up_script_hash"]
+ command = f"git log -1 --pretty=tformat:%H {set_up_script_path}"
+
+ proc = subprocess.run(
+ command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True
+ )
+ git_commit_hash = proc.stdout.decode("utf-8").strip("\n")
+ proc.check_returncode()
+
+ if set_up_script_hash == git_commit_hash:
+ return 0
+ # Return code 1 if the resources need to be refreshed.
+ return 1
+    # Return error code 2 if the file doesn't exist.
+ return 2
+
+
+if __name__ == "__main__":
+ parser = ArgumentParser()
+ parser.add_argument(
+ "--resource_downloaded_dir", help="Resources downloaded directory.", type=str
+ )
+ parser.add_argument(
+ "--setup_script_path", help="Path to set_up_default_resources.py.", type=str
+ )
+ args = parser.parse_args()
+
+ # Check if the repo root directory is a git repository
+ root_file_dir = os.path.dirname(os.path.abspath(args.setup_script_path))
+ is_git_repo = os.path.exists(os.path.join(root_file_dir, ".git"))
+
+ # if we have a git repo then check the resources are downloaded,
+ # otherwise it's considered a prerequisite to have run
+ # the set_up_default_resources.py
+ status = (
+ check_update_resources_downloaded(
+ args.resource_downloaded_dir, args.setup_script_path
+ )
+ if is_git_repo
+ else 0
+ )
+ sys.exit(status)
diff --git a/set_up_default_resources.py b/set_up_default_resources.py
index 32c4506..48e6a67 100755
--- a/set_up_default_resources.py
+++ b/set_up_default_resources.py
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
-
-# Copyright (c) 2022 Arm Limited. All rights reserved.
+# Copyright (c) 2021-2022 Arm Limited. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,137 +13,244 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-import os, errno
-import urllib.request
-import subprocess
+import errno
import fnmatch
+import json
import logging
+import os
+import re
+import shutil
+import subprocess
import sys
-
-from argparse import ArgumentParser, ArgumentTypeError
-from urllib.error import URLError
+import urllib.request
+from argparse import ArgumentParser
+from argparse import ArgumentTypeError
from collections import namedtuple
+from urllib.error import URLError
-json_uc_res = [{
- "use_case_name": "ad",
- "resources": [{"name": "ad_medium_int8.tflite",
- "url": "https://github.com/ARM-software/ML-zoo/raw/7c32b097f7d94aae2cd0b98a8ed5a3ba81e66b18/models/anomaly_detection/micronet_medium/tflite_int8/ad_medium_int8.tflite"},
- {"name": "ifm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/7c32b097f7d94aae2cd0b98a8ed5a3ba81e66b18/models/anomaly_detection/micronet_medium/tflite_int8/testing_input/input/0.npy"},
- {"name": "ofm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/7c32b097f7d94aae2cd0b98a8ed5a3ba81e66b18/models/anomaly_detection/micronet_medium/tflite_int8/testing_output/Identity/0.npy"}]
-},
+json_uc_res = [
+ {
+ "use_case_name": "ad",
+ "url_prefix": [
+ "https://github.com/ARM-software/ML-zoo/raw/7c32b097f7d94aae2cd0b98a8ed5a3ba81e66b18/models/anomaly_detection/micronet_medium/tflite_int8/"
+ ],
+ "resources": [
+ {
+ "name": "ad_medium_int8.tflite",
+ "url": "{url_prefix:0}ad_medium_int8.tflite",
+ },
+ {"name": "ifm0.npy", "url": "{url_prefix:0}testing_input/input/0.npy"},
+ {"name": "ofm0.npy", "url": "{url_prefix:0}testing_output/Identity/0.npy"},
+ ],
+ },
{
"use_case_name": "asr",
- "resources": [{"name": "wav2letter_pruned_int8.tflite",
- "url": "https://github.com/ARM-software/ML-zoo/raw/1a92aa08c0de49a7304e0a7f3f59df6f4fd33ac8/models/speech_recognition/wav2letter/tflite_pruned_int8/wav2letter_pruned_int8.tflite"},
- {"name": "ifm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/1a92aa08c0de49a7304e0a7f3f59df6f4fd33ac8/models/speech_recognition/wav2letter/tflite_pruned_int8/testing_input/input_2_int8/0.npy"},
- {"name": "ofm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/1a92aa08c0de49a7304e0a7f3f59df6f4fd33ac8/models/speech_recognition/wav2letter/tflite_pruned_int8/testing_output/Identity_int8/0.npy"}]
+ "url_prefix": [
+ "https://github.com/ARM-software/ML-zoo/raw/1a92aa08c0de49a7304e0a7f3f59df6f4fd33ac8/models/speech_recognition/wav2letter/tflite_pruned_int8/"
+ ],
+ "resources": [
+ {
+ "name": "wav2letter_pruned_int8.tflite",
+ "url": "{url_prefix:0}wav2letter_pruned_int8.tflite",
+ },
+ {
+ "name": "ifm0.npy",
+ "url": "{url_prefix:0}testing_input/input_2_int8/0.npy",
+ },
+ {
+ "name": "ofm0.npy",
+ "url": "{url_prefix:0}testing_output/Identity_int8/0.npy",
+ },
+ ],
},
{
"use_case_name": "img_class",
- "resources": [{"name": "mobilenet_v2_1.0_224_INT8.tflite",
- "url": "https://github.com/ARM-software/ML-zoo/raw/e0aa361b03c738047b9147d1a50e3f2dcb13dbcb/models/image_classification/mobilenet_v2_1.0_224/tflite_int8/mobilenet_v2_1.0_224_INT8.tflite"},
- {"name": "ifm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/e0aa361b03c738047b9147d1a50e3f2dcb13dbcb/models/image_classification/mobilenet_v2_1.0_224/tflite_int8/testing_input/tfl.quantize/0.npy"},
- {"name": "ofm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/e0aa361b03c738047b9147d1a50e3f2dcb13dbcb/models/image_classification/mobilenet_v2_1.0_224/tflite_int8/testing_output/MobilenetV2/Predictions/Reshape_11/0.npy"}]
+ "url_prefix": [
+ "https://github.com/ARM-software/ML-zoo/raw/e0aa361b03c738047b9147d1a50e3f2dcb13dbcb/models/image_classification/mobilenet_v2_1.0_224/tflite_int8/"
+ ],
+ "resources": [
+ {
+ "name": "mobilenet_v2_1.0_224_INT8.tflite",
+ "url": "{url_prefix:0}mobilenet_v2_1.0_224_INT8.tflite",
+ },
+ {
+ "name": "ifm0.npy",
+ "url": "{url_prefix:0}testing_input/tfl.quantize/0.npy",
+ },
+ {
+ "name": "ofm0.npy",
+ "url": "{url_prefix:0}testing_output/MobilenetV2/Predictions/Reshape_11/0.npy",
+ },
+ ],
},
{
"use_case_name": "object_detection",
- "resources": [{"name": "yolo-fastest_192_face_v4.tflite",
- "url": "https://github.com/emza-vs/ModelZoo/blob/v1.0/object_detection/yolo-fastest_192_face_v4.tflite?raw=true"}]
+ "url_prefix": [
+ "https://github.com/emza-vs/ModelZoo/blob/v1.0/object_detection/"
+ ],
+ "resources": [
+ {
+ "name": "yolo-fastest_192_face_v4.tflite",
+ "url": "{url_prefix:0}yolo-fastest_192_face_v4.tflite?raw=true",
+ }
+ ],
},
{
"use_case_name": "kws",
- "resources": [{"name": "ifm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/9f506fe52b39df545f0e6c5ff9223f671bc5ae00/models/keyword_spotting/micronet_medium/tflite_int8/testing_input/input/0.npy"},
- {"name": "ofm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/9f506fe52b39df545f0e6c5ff9223f671bc5ae00/models/keyword_spotting/micronet_medium/tflite_int8/testing_output/Identity/0.npy"},
- {"name": "kws_micronet_m.tflite",
- "url": " https://github.com/ARM-software/ML-zoo/raw/9f506fe52b39df545f0e6c5ff9223f671bc5ae00/models/keyword_spotting/micronet_medium/tflite_int8/kws_micronet_m.tflite"}]
+ "url_prefix": [
+ "https://github.com/ARM-software/ML-zoo/raw/9f506fe52b39df545f0e6c5ff9223f671bc5ae00/models/keyword_spotting/micronet_medium/tflite_int8/"
+ ],
+ "resources": [
+ {"name": "ifm0.npy", "url": "{url_prefix:0}testing_input/input/0.npy"},
+ {"name": "ofm0.npy", "url": "{url_prefix:0}testing_output/Identity/0.npy"},
+ {
+ "name": "kws_micronet_m.tflite",
+ "url": "{url_prefix:0}kws_micronet_m.tflite",
+ },
+ ],
},
- {
+ {
"use_case_name": "vww",
- "resources": [{"name": "vww4_128_128_INT8.tflite",
- "url": "https://github.com/ARM-software/ML-zoo/raw/7dd3b16bb84007daf88be8648983c07f3eb21140/models/visual_wake_words/micronet_vww4/tflite_int8/vww4_128_128_INT8.tflite"},
- {"name": "ifm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/7dd3b16bb84007daf88be8648983c07f3eb21140/models/visual_wake_words/micronet_vww4/tflite_int8/testing_input/input/0.npy"},
- {"name": "ofm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/7dd3b16bb84007daf88be8648983c07f3eb21140/models/visual_wake_words/micronet_vww4/tflite_int8/testing_output/Identity/0.npy"}]
+ "url_prefix": [
+ "https://github.com/ARM-software/ML-zoo/raw/7dd3b16bb84007daf88be8648983c07f3eb21140/models/visual_wake_words/micronet_vww4/tflite_int8/"
+ ],
+ "resources": [
+ {
+ "name": "vww4_128_128_INT8.tflite",
+ "url": "{url_prefix:0}vww4_128_128_INT8.tflite",
+ },
+ {"name": "ifm0.npy", "url": "{url_prefix:0}testing_input/input/0.npy"},
+ {"name": "ofm0.npy", "url": "{url_prefix:0}testing_output/Identity/0.npy"},
+ ],
},
{
"use_case_name": "kws_asr",
- "resources": [{"name": "wav2letter_pruned_int8.tflite",
- "url": "https://github.com/ARM-software/ML-zoo/raw/1a92aa08c0de49a7304e0a7f3f59df6f4fd33ac8/models/speech_recognition/wav2letter/tflite_pruned_int8/wav2letter_pruned_int8.tflite"},
- {"sub_folder": "asr", "name": "ifm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/1a92aa08c0de49a7304e0a7f3f59df6f4fd33ac8/models/speech_recognition/wav2letter/tflite_pruned_int8/testing_input/input_2_int8/0.npy"},
- {"sub_folder": "asr", "name": "ofm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/1a92aa08c0de49a7304e0a7f3f59df6f4fd33ac8/models/speech_recognition/wav2letter/tflite_pruned_int8/testing_output/Identity_int8/0.npy"},
- {"sub_folder": "kws", "name": "ifm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/9f506fe52b39df545f0e6c5ff9223f671bc5ae00/models/keyword_spotting/micronet_medium/tflite_int8/testing_input/input/0.npy"},
- {"sub_folder": "kws", "name": "ofm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/9f506fe52b39df545f0e6c5ff9223f671bc5ae00/models/keyword_spotting/micronet_medium/tflite_int8/testing_output/Identity/0.npy"},
- {"name": "kws_micronet_m.tflite",
- "url": "https://github.com/ARM-software/ML-zoo/raw/9f506fe52b39df545f0e6c5ff9223f671bc5ae00/models/keyword_spotting/micronet_medium/tflite_int8/kws_micronet_m.tflite"}]
+ "url_prefix": [
+ "https://github.com/ARM-software/ML-zoo/raw/1a92aa08c0de49a7304e0a7f3f59df6f4fd33ac8/models/speech_recognition/wav2letter/tflite_pruned_int8/",
+ "https://github.com/ARM-software/ML-zoo/raw/9f506fe52b39df545f0e6c5ff9223f671bc5ae00/models/keyword_spotting/micronet_medium/tflite_int8/",
+ ],
+ "resources": [
+ {
+ "name": "wav2letter_pruned_int8.tflite",
+ "url": "{url_prefix:0}wav2letter_pruned_int8.tflite",
+ },
+ {
+ "sub_folder": "asr",
+ "name": "ifm0.npy",
+ "url": "{url_prefix:0}testing_input/input_2_int8/0.npy",
+ },
+ {
+ "sub_folder": "asr",
+ "name": "ofm0.npy",
+ "url": "{url_prefix:0}testing_output/Identity_int8/0.npy",
+ },
+ {
+ "sub_folder": "kws",
+ "name": "ifm0.npy",
+ "url": "{url_prefix:1}testing_input/input/0.npy",
+ },
+ {
+ "sub_folder": "kws",
+ "name": "ofm0.npy",
+ "url": "{url_prefix:1}testing_output/Identity/0.npy",
+ },
+ {
+ "name": "kws_micronet_m.tflite",
+ "url": "{url_prefix:1}kws_micronet_m.tflite",
+ },
+ ],
},
{
"use_case_name": "noise_reduction",
- "resources": [{"name": "rnnoise_INT8.tflite",
- "url": "https://github.com/ARM-software/ML-zoo/raw/a061600058097a2785d6f1f7785e5a2d2a142955/models/noise_suppression/RNNoise/tflite_int8/rnnoise_INT8.tflite"},
- {"name": "ifm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/a061600058097a2785d6f1f7785e5a2d2a142955/models/noise_suppression/RNNoise/tflite_int8/testing_input/main_input_int8/0.npy"},
- {"name": "ifm1.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/a061600058097a2785d6f1f7785e5a2d2a142955/models/noise_suppression/RNNoise/tflite_int8/testing_input/vad_gru_prev_state_int8/0.npy"},
- {"name": "ifm2.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/a061600058097a2785d6f1f7785e5a2d2a142955/models/noise_suppression/RNNoise/tflite_int8/testing_input/noise_gru_prev_state_int8/0.npy"},
- {"name": "ifm3.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/a061600058097a2785d6f1f7785e5a2d2a142955/models/noise_suppression/RNNoise/tflite_int8/testing_input/denoise_gru_prev_state_int8/0.npy"},
- {"name": "ofm0.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/a061600058097a2785d6f1f7785e5a2d2a142955/models/noise_suppression/RNNoise/tflite_int8/testing_output/Identity_int8/0.npy"},
- {"name": "ofm1.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/a061600058097a2785d6f1f7785e5a2d2a142955/models/noise_suppression/RNNoise/tflite_int8/testing_output/Identity_1_int8/0.npy"},
- {"name": "ofm2.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/a061600058097a2785d6f1f7785e5a2d2a142955/models/noise_suppression/RNNoise/tflite_int8/testing_output/Identity_2_int8/0.npy"},
- {"name": "ofm3.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/a061600058097a2785d6f1f7785e5a2d2a142955/models/noise_suppression/RNNoise/tflite_int8/testing_output/Identity_3_int8/0.npy"},
- {"name": "ofm4.npy",
- "url": "https://github.com/ARM-software/ML-zoo/raw/a061600058097a2785d6f1f7785e5a2d2a142955/models/noise_suppression/RNNoise/tflite_int8/testing_output/Identity_4_int8/0.npy"},
- ]
+ "url_prefix": [
+ "https://github.com/ARM-software/ML-zoo/raw/a061600058097a2785d6f1f7785e5a2d2a142955/models/noise_suppression/RNNoise/tflite_int8/"
+ ],
+ "resources": [
+ {"name": "rnnoise_INT8.tflite", "url": "{url_prefix:0}rnnoise_INT8.tflite"},
+ {
+ "name": "ifm0.npy",
+ "url": "{url_prefix:0}testing_input/main_input_int8/0.npy",
+ },
+ {
+ "name": "ifm1.npy",
+ "url": "{url_prefix:0}testing_input/vad_gru_prev_state_int8/0.npy",
+ },
+ {
+ "name": "ifm2.npy",
+ "url": "{url_prefix:0}testing_input/noise_gru_prev_state_int8/0.npy",
+ },
+ {
+ "name": "ifm3.npy",
+ "url": "{url_prefix:0}testing_input/denoise_gru_prev_state_int8/0.npy",
+ },
+ {
+ "name": "ofm0.npy",
+ "url": "{url_prefix:0}testing_output/Identity_int8/0.npy",
+ },
+ {
+ "name": "ofm1.npy",
+ "url": "{url_prefix:0}testing_output/Identity_1_int8/0.npy",
+ },
+ {
+ "name": "ofm2.npy",
+ "url": "{url_prefix:0}testing_output/Identity_2_int8/0.npy",
+ },
+ {
+ "name": "ofm3.npy",
+ "url": "{url_prefix:0}testing_output/Identity_3_int8/0.npy",
+ },
+ {
+ "name": "ofm4.npy",
+ "url": "{url_prefix:0}testing_output/Identity_4_int8/0.npy",
+ },
+ ],
},
{
"use_case_name": "inference_runner",
- "resources": [{"name": "dnn_s_quantized.tflite",
- "url": "https://github.com/ARM-software/ML-zoo/raw/68b5fbc77ed28e67b2efc915997ea4477c1d9d5b/models/keyword_spotting/dnn_small/tflite_int8/dnn_s_quantized.tflite"}
- ]
- },]
+ "url_prefix": [
+ "https://github.com/ARM-software/ML-zoo/raw/68b5fbc77ed28e67b2efc915997ea4477c1d9d5b/models/keyword_spotting/dnn_small/tflite_int8/"
+ ],
+ "resources": [
+ {
+ "name": "dnn_s_quantized.tflite",
+ "url": "{url_prefix:0}dnn_s_quantized.tflite",
+ }
+ ],
+ },
+]
# Valid NPU configurations:
valid_npu_config_names = [
- 'ethos-u55-32', 'ethos-u55-64',
- 'ethos-u55-128', 'ethos-u55-256',
- 'ethos-u65-256','ethos-u65-512']
+ "ethos-u55-32",
+ "ethos-u55-64",
+ "ethos-u55-128",
+ "ethos-u55-256",
+ "ethos-u65-256",
+ "ethos-u65-512",
+]
# Default NPU configurations (these are always run when the models are optimised)
default_npu_config_names = [valid_npu_config_names[2], valid_npu_config_names[4]]
# NPU config named tuple
-NPUConfig = namedtuple('NPUConfig',['config_name',
- 'memory_mode',
- 'system_config',
- 'ethos_u_npu_id',
- 'ethos_u_config_id',
- 'arena_cache_size'])
+NPUConfig = namedtuple(
+ "NPUConfig",
+ [
+ "config_name",
+ "memory_mode",
+ "system_config",
+ "ethos_u_npu_id",
+ "ethos_u_config_id",
+ "arena_cache_size",
+ ],
+)
# The internal SRAM size for Corstone-300 implementation on MPS3 specified by AN552
-mps3_max_sram_sz = 2 * 1024 * 1024 # 2 MiB (2 banks of 1 MiB each)
+mps3_max_sram_sz = 2 * 1024 * 1024 # 2 MiB (2 banks of 1 MiB each)
-def call_command(command: str) -> str:
+def call_command(command: str, verbose: bool = True) -> str:
"""
Helpers function that call subprocess and return the output.
@@ -152,10 +258,13 @@ def call_command(command: str) -> str:
----------
command (string): Specifies the command to run.
"""
- logging.info(command)
- proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
+ if verbose:
+ logging.info(command)
+ proc = subprocess.run(
+ command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True
+ )
log = proc.stdout.decode("utf-8")
- if proc.returncode == 0:
+ if proc.returncode == 0 and verbose:
logging.info(log)
else:
logging.error(log)
@@ -163,7 +272,9 @@ def call_command(command: str) -> str:
return log
-def get_default_npu_config_from_name(config_name: str, arena_cache_size: int = 0) -> NPUConfig:
+def get_default_npu_config_from_name(
+ config_name: str, arena_cache_size: int = 0
+) -> NPUConfig:
"""
Gets the file suffix for the tflite file from the
`accelerator_config` string.
@@ -183,10 +294,12 @@ def get_default_npu_config_from_name(config_name: str, arena_cache_size: int = 0
config name
"""
if config_name not in valid_npu_config_names:
- raise ValueError(f"""
+ raise ValueError(
+ f"""
Invalid Ethos-U NPU configuration.
Select one from {valid_npu_config_names}.
- """)
+ """
+ )
strings_ids = ["ethos-u55-", "ethos-u65-"]
processor_ids = ["U55", "U65"]
@@ -195,28 +308,43 @@ def get_default_npu_config_from_name(config_name: str, arena_cache_size: int = 0
system_configs = ["Ethos_U55_High_End_Embedded", "Ethos_U65_High_End"]
memory_modes_arena = {
# For shared SRAM memory mode, we use the MPS3 SRAM size by default
- "Shared_Sram" : mps3_max_sram_sz if arena_cache_size <= 0 else arena_cache_size,
+ "Shared_Sram": mps3_max_sram_sz if arena_cache_size <= 0 else arena_cache_size,
# For dedicated SRAM memory mode, we do no override the arena size. This is expected to
# be defined in the vela configuration file instead.
- "Dedicated_Sram" : None if arena_cache_size <= 0 else arena_cache_size
+ "Dedicated_Sram": None if arena_cache_size <= 0 else arena_cache_size,
}
for i in range(len(strings_ids)):
if config_name.startswith(strings_ids[i]):
npu_config_id = config_name.replace(strings_ids[i], prefix_ids[i])
- return NPUConfig(config_name=config_name,
- memory_mode=memory_modes[i],
- system_config=system_configs[i],
- ethos_u_npu_id=processor_ids[i],
- ethos_u_config_id=npu_config_id,
- arena_cache_size=memory_modes_arena[memory_modes[i]])
+ return NPUConfig(
+ config_name=config_name,
+ memory_mode=memory_modes[i],
+ system_config=system_configs[i],
+ ethos_u_npu_id=processor_ids[i],
+ ethos_u_config_id=npu_config_id,
+ arena_cache_size=memory_modes_arena[memory_modes[i]],
+ )
return None
-def set_up_resources(run_vela_on_models: bool = False,
- additional_npu_config_names: list = (),
- arena_cache_size: int = 0):
+def remove_tree_dir(dir_path):
+ try:
+ # Remove the full directory
+ shutil.rmtree(dir_path)
+ # Re-create an empty one
+ os.mkdir(dir_path)
+ except Exception as e:
+ logging.error(f"Failed to delete {dir_path}.")
+
+
+def set_up_resources(
+ run_vela_on_models: bool = False,
+ additional_npu_config_names: list = (),
+ arena_cache_size: int = 0,
+ check_clean_folder: bool = False,
+):
"""
Helpers function that retrieve the output from a command.
@@ -229,8 +357,26 @@ def set_up_resources(run_vela_on_models: bool = False,
as the cache size. If 0, the default values, as per
the NPU config requirements, are used.
"""
+ # Paths
current_file_dir = os.path.dirname(os.path.abspath(__file__))
- download_dir = os.path.abspath(os.path.join(current_file_dir, "resources_downloaded"))
+ download_dir = os.path.abspath(
+ os.path.join(current_file_dir, "resources_downloaded")
+ )
+ metadata_file_path = os.path.join(
+ download_dir, "resources_downloaded_metadata.json"
+ )
+
+ metadata_dict = dict()
+ vela_version = "3.2.0"
+
+ # Check if the current directory is a git repository
+ is_git_repo = os.path.exists(os.path.join(current_file_dir, ".git"))
+ git_commit_hash = ""
+ setup_script_hash_changed = False
+ if is_git_repo:
+        # If the current directory is a git repository then extract the set_up_default_resources.py hash
+ command = f"git log -1 --pretty=tformat:%H {os.path.abspath(__file__)}"
+ git_commit_hash = call_command(command, False)
try:
# 1.1 Does the download dir exist?
@@ -238,12 +384,35 @@ def set_up_resources(run_vela_on_models: bool = False,
except OSError as e:
if e.errno == errno.EEXIST:
logging.info("'resources_downloaded' directory exists.")
+ # Check and clean?
+ if check_clean_folder and os.path.isfile(metadata_file_path):
+ with open(metadata_file_path) as (metadata_file):
+ metadata_dict = json.load(metadata_file)
+ vela_in_metadata = metadata_dict["ethosu_vela_version"]
+ if vela_in_metadata != vela_version:
+ # Check if all the resources needs to be removed and regenerated.
+ # This can happen when the Vela version has changed.
+ logging.info(
+ f"Vela version in metadata is {vela_in_metadata}, current {vela_version}. Removing the resources and re-download them."
+ )
+ remove_tree_dir(download_dir)
+ metadata_dict = dict()
+ else:
+            # Check if the set_up_default_resources.py has changed from last setup, only if this is a git repo
+ if is_git_repo:
+ setup_script_hash_changed = not (
+ metadata_dict["set_up_script_hash"] == git_commit_hash
+ )
else:
raise
# 1.2 Does the virtual environment exist?
- env_python = str(os.path.abspath(os.path.join(download_dir, "env", "bin", "python3")))
- env_activate = str(os.path.abspath(os.path.join(download_dir, "env", "bin", "activate")))
+ env_python = str(
+ os.path.abspath(os.path.join(download_dir, "env", "bin", "python3"))
+ )
+ env_activate = str(
+ os.path.abspath(os.path.join(download_dir, "env", "bin", "activate"))
+ )
if not os.path.isdir(os.path.join(download_dir, "env")):
os.chdir(download_dir)
# Create the virtual environment
@@ -254,8 +423,9 @@ def set_up_resources(run_vela_on_models: bool = False,
command = f"{env_python} -m {c}"
call_command(command)
os.chdir(current_file_dir)
+
# 1.3 Make sure to have all the requirement
- requirements = ["ethos-u-vela==3.2.0"]
+ requirements = [f"ethos-u-vela=={vela_version}"]
command = f"{env_python} -m pip freeze"
packages = call_command(command)
for req in requirements:
@@ -264,41 +434,64 @@ def set_up_resources(run_vela_on_models: bool = False,
call_command(command)
# 2. Download models
+ logging.info("Downloading resources.")
for uc in json_uc_res:
+ use_case_name = uc["use_case_name"]
+ res_url_prefix = uc["url_prefix"]
try:
# Does the usecase_name download dir exist?
- os.mkdir(os.path.join(download_dir, uc["use_case_name"]))
+ os.mkdir(os.path.join(download_dir, use_case_name))
except OSError as e:
- if e.errno != errno.EEXIST:
- logging.error(f"Error creating {uc['use_case_name']} directory.")
+ if e.errno == errno.EEXIST:
+ # The usecase_name download dir exist
+ if setup_script_hash_changed:
+ for idx, metadata_uc_url_prefix in enumerate(
+ [
+ f
+ for f in metadata_dict["resources_info"]
+ if f["use_case_name"] == use_case_name
+ ][0]["url_prefix"]
+ ):
+ if metadata_uc_url_prefix != res_url_prefix[idx]:
+ logging.info(f"Removing {use_case_name} resources.")
+ remove_tree_dir(os.path.join(download_dir, use_case_name))
+ break
+ elif e.errno != errno.EEXIST:
+ logging.error(f"Error creating {use_case_name} directory.")
raise
+ reg_expr_str = r"{url_prefix:(.*\d)}"
+ reg_expr_pattern = re.compile(reg_expr_str)
for res in uc["resources"]:
res_name = res["name"]
- res_url = res["url"]
+ url_prefix_idx = int(reg_expr_pattern.search(res["url"]).group(1))
+ res_url = res_url_prefix[url_prefix_idx] + re.sub(
+ reg_expr_str, "", res["url"]
+ )
+
+ sub_folder = ""
if "sub_folder" in res:
try:
# Does the usecase_name/sub_folder download dir exist?
- os.mkdir(os.path.join(download_dir, uc["use_case_name"], res["sub_folder"]))
+ os.mkdir(
+ os.path.join(download_dir, use_case_name, res["sub_folder"])
+ )
except OSError as e:
if e.errno != errno.EEXIST:
- logging.error(f"Error creating {uc['use_case_name']} / {res['sub_folder']} directory.")
+ logging.error(
+ f"Error creating {use_case_name} / {res['sub_folder']} directory."
+ )
raise
- res_dst = os.path.join(download_dir,
- uc["use_case_name"],
- res["sub_folder"],
- res_name)
- else:
- res_dst = os.path.join(download_dir,
- uc["use_case_name"],
- res_name)
+ sub_folder = res["sub_folder"]
+
+ res_dst = os.path.join(download_dir, use_case_name, sub_folder, res_name)
if os.path.isfile(res_dst):
logging.info(f"File {res_dst} exists, skipping download.")
else:
try:
g = urllib.request.urlopen(res_url)
- with open(res_dst, 'b+w') as f:
+ with open(res_dst, "b+w") as f:
f.write(g.read())
logging.info(f"- Downloaded {res_url} to {res_dst}.")
except URLError:
@@ -314,18 +507,26 @@ def set_up_resources(run_vela_on_models: bool = False,
# Note: To avoid to run vela twice on the same model, it's supposed that
# downloaded model names don't contain the 'vela' word.
if run_vela_on_models is True:
- config_file = os.path.join(current_file_dir, "scripts", "vela", "default_vela.ini")
- models = [os.path.join(dirpath, f)
- for dirpath, dirnames, files in os.walk(download_dir)
- for f in fnmatch.filter(files, '*.tflite') if "vela" not in f]
+ config_file = os.path.join(
+ current_file_dir, "scripts", "vela", "default_vela.ini"
+ )
+ models = [
+ os.path.join(dirpath, f)
+ for dirpath, dirnames, files in os.walk(download_dir)
+ for f in fnmatch.filter(files, "*.tflite")
+ if "vela" not in f
+ ]
# Consolidate all config names while discarding duplicates:
config_names = list(set(default_npu_config_names + additional_npu_config_names))
# Get npu config tuple for each config name in a list:
- npu_configs = [get_default_npu_config_from_name(name, arena_cache_size) for name in config_names]
+ npu_configs = [
+ get_default_npu_config_from_name(name, arena_cache_size)
+ for name in config_names
+ ]
- logging.info(f'All models will be optimised for these configs:')
+ logging.info(f"All models will be optimised for these configs:")
for config in npu_configs:
logging.info(config)
@@ -340,25 +541,32 @@ def set_up_resources(run_vela_on_models: bool = False,
vela_command_arena_cache_size = ""
if config.arena_cache_size:
- vela_command_arena_cache_size = f"--arena-cache-size={config.arena_cache_size}"
-
- vela_command = (f". {env_activate} && vela {model} " +
- f"--accelerator-config={config.config_name} " +
- "--optimise Performance " +
- f"--config {config_file} " +
- f"--memory-mode={config.memory_mode} " +
- f"--system-config={config.system_config} " +
- f"--output-dir={output_dir} " +
- f"{vela_command_arena_cache_size}")
+ vela_command_arena_cache_size = (
+ f"--arena-cache-size={config.arena_cache_size}"
+ )
+
+ vela_command = (
+ f". {env_activate} && vela {model} "
+ + f"--accelerator-config={config.config_name} "
+ + "--optimise Performance "
+ + f"--config {config_file} "
+ + f"--memory-mode={config.memory_mode} "
+ + f"--system-config={config.system_config} "
+ + f"--output-dir={output_dir} "
+ + f"{vela_command_arena_cache_size}"
+ )
# we want the name to include the configuration suffix. For example: vela_H128,
# vela_Y512 etc.
- new_suffix = "_vela_" + config.ethos_u_config_id + '.tflite'
- new_vela_optimised_model_path = (
- vela_optimised_model_path.replace("_vela.tflite", new_suffix))
+ new_suffix = "_vela_" + config.ethos_u_config_id + ".tflite"
+ new_vela_optimised_model_path = vela_optimised_model_path.replace(
+ "_vela.tflite", new_suffix
+ )
if os.path.isfile(new_vela_optimised_model_path):
- logging.info(f"File {new_vela_optimised_model_path} exists, skipping optimisation.")
+ logging.info(
+ f"File {new_vela_optimised_model_path} exists, skipping optimisation."
+ )
optimisation_skipped = True
continue
@@ -366,36 +574,63 @@ def set_up_resources(run_vela_on_models: bool = False,
# rename default vela model
os.rename(vela_optimised_model_path, new_vela_optimised_model_path)
- logging.info(f"Renaming {vela_optimised_model_path} to {new_vela_optimised_model_path}.")
-
+ logging.info(
+ f"Renaming {vela_optimised_model_path} to {new_vela_optimised_model_path}."
+ )
# If any optimisation was skipped, show how to regenerate:
if optimisation_skipped:
logging.warning("One or more optimisations were skipped.")
- logging.warning(f"To optimise all the models, please remove the directory {download_dir}.")
+ logging.warning(
+ f"To optimise all the models, please remove the directory {download_dir}."
+ )
+
+ # 4. Collect and write metadata
+ logging.info("Collecting and write metadata.")
+ metadata_dict["ethosu_vela_version"] = vela_version
+ metadata_dict["set_up_script_hash"] = git_commit_hash.strip("\n")
+ metadata_dict["resources_info"] = json_uc_res
+
+ with open(metadata_file_path, "w") as metadata_file:
+ json.dump(metadata_dict, metadata_file, indent=4)
-if __name__ == '__main__':
+if __name__ == "__main__":
parser = ArgumentParser()
- parser.add_argument("--skip-vela",
- help="Do not run Vela optimizer on downloaded models.",
- action="store_true")
- parser.add_argument("--additional-ethos-u-config-name",
- help=f"""Additional (non-default) configurations for Vela:
+ parser.add_argument(
+ "--skip-vela",
+ help="Do not run Vela optimizer on downloaded models.",
+ action="store_true",
+ )
+ parser.add_argument(
+ "--additional-ethos-u-config-name",
+ help=f"""Additional (non-default) configurations for Vela:
{valid_npu_config_names}""",
- default=[], action="append")
- parser.add_argument("--arena-cache-size",
- help="Arena cache size in bytes (if overriding the defaults)",
- type=int,
- default=0)
+ default=[],
+ action="append",
+ )
+ parser.add_argument(
+ "--arena-cache-size",
+ help="Arena cache size in bytes (if overriding the defaults)",
+ type=int,
+ default=0,
+ )
+ parser.add_argument(
+ "--clean",
+        help="Clean the directory and optimize the downloaded resources",
+ action="store_true",
+ )
args = parser.parse_args()
if args.arena_cache_size < 0:
- raise ArgumentTypeError('Arena cache size cannot not be less than 0')
+        raise ArgumentTypeError("Arena cache size cannot be less than 0")
- logging.basicConfig(filename='log_build_default.log', level=logging.DEBUG)
+ logging.basicConfig(filename="log_build_default.log", level=logging.DEBUG)
logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
- set_up_resources(not args.skip_vela,
- args.additional_ethos_u_config_name,
- args.arena_cache_size)
+ set_up_resources(
+ not args.skip_vela,
+ args.additional_ethos_u_config_name,
+ args.arena_cache_size,
+ args.clean,
+ )