Diffstat (limited to 'scripts')
-rw-r--r--  scripts/BUILD.bazel                          28
-rw-r--r--  scripts/arm_compute_library_nn_driver.go     48
-rwxr-xr-x  scripts/caffe_data_extractor.py              45
-rwxr-xr-x  scripts/check_bad_style.sh                    2
-rw-r--r--  scripts/check_header_guards.py              208
-rw-r--r--  scripts/clang-tidy.h                          7
-rwxr-xr-x  scripts/clang_tidy_rules.py                  32
-rwxr-xr-x  scripts/ensure_single_eol.py                 48
-rwxr-xr-x  scripts/format_code.py                      426
-rwxr-xr-x  scripts/format_doxygen.py                     2
-rwxr-xr-x  scripts/generate_android_bp.py              213
-rw-r--r--  scripts/generate_build_files.py             294
-rw-r--r--  scripts/generate_documentation.sh            32
-rw-r--r--  scripts/include_functions_kernels.py          6
-rwxr-xr-x  scripts/modules/Shell.py                    105
-rw-r--r--  scripts/print_version_file.py                55
-rwxr-xr-x  scripts/tensorflow_data_extractor.py         51
-rw-r--r--  scripts/update_supported_ops.py             414
18 files changed, 1907 insertions, 109 deletions
diff --git a/scripts/BUILD.bazel b/scripts/BUILD.bazel
new file mode 100644
index 0000000000..06ef17beb2
--- /dev/null
+++ b/scripts/BUILD.bazel
@@ -0,0 +1,28 @@
+# Copyright (c) 2023 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+py_binary(
+ name = "print_version_file",
+ srcs = ["print_version_file.py"],
+ main = "print_version_file.py",
+ visibility = ["//:__pkg__"],
+)
diff --git a/scripts/arm_compute_library_nn_driver.go b/scripts/arm_compute_library_nn_driver.go
index 8b1b80a7a7..2aab2d3fe7 100644
--- a/scripts/arm_compute_library_nn_driver.go
+++ b/scripts/arm_compute_library_nn_driver.go
@@ -1,5 +1,5 @@
//
-// Copyright © 2020 ARM Ltd. All rights reserved.
+// Copyright © 2020-2023 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
@@ -11,15 +11,52 @@ import (
"strings"
)
+func isVersionAtLeast(version_name string, target_version int) bool {
+ name_map := map[string]int {
+ "L": 5, "5": 5,
+ "M": 6, "6": 6,
+ "N": 7, "7": 7,
+ "O": 8, "8": 8,
+ "P": 9, "9": 9,
+ "Q": 10, "10": 10,
+ "R": 11, "11": 11,
+ "S": 12, "12": 12,
+ "T": 13, "13": 13,
+ "U": 14, "14": 14,
+ }
+ if _, ok := name_map[version_name]; ok {
+ return name_map[version_name] >= target_version
+ } else {
+ return false
+ }
+}
+
func globalFlags(ctx android.BaseContext) []string {
var cppflags []string
- if ctx.AConfig().PlatformVersionName() == "Q" || ctx.AConfig().PlatformVersionName() == "10" ||
+ if ctx.AConfig().PlatformVersionName() == "Q" || ctx.AConfig().PlatformVersionName() == "10" ||
ctx.AConfig().PlatformVersionName() == "R" || ctx.AConfig().PlatformVersionName() == "11" ||
ctx.AConfig().PlatformVersionName() == "S" || ctx.AConfig().PlatformVersionName() == "12" {
cppflags = append(cppflags, "-fno-addrsig")
}
+ if ctx.AConfig().PlatformVersionName() == "R" || ctx.AConfig().PlatformVersionName() == "11" {
+ for _, a := range ctx.DeviceConfig().Arches() {
+ theArch := a.ArchType.String()
+ if theArch == "armv8-2a" {
+ cppflags = append(cppflags, "-march=armv8.2-a+fp16")
+ cppflags = append(cppflags, "-DARM_COMPUTE_ENABLE_FP16")
+ cppflags = append(cppflags, "-DENABLE_FP16_KERNELS")
+ }
+ }
+ }
+
+ // Since Android T, the underlying NDK no longer supports system assemblers such as GAS, in favor of the integrated assembler.
+ // However, for releases older than Android T we still want to disable the integrated assembler for backward compatibility.
+ if ! isVersionAtLeast(ctx.AConfig().PlatformVersionName(), 13) {
+ cppflags = append(cppflags, "-no-integrated-as")
+ }
+
data_types := strings.Split(ctx.AConfig().GetenvWithDefault("COMPUTE_LIB_DATA_TYPE", "ALL"), ",")
for _, x := range data_types {
@@ -38,9 +75,6 @@ func globalFlags(ctx android.BaseContext) []string {
if strings.ToUpper(x) == "ALL" || strings.ToUpper(x) == "QSYMM16" {
cppflags = append(cppflags, "-DENABLE_QSYMM16_KERNELS")
}
- if strings.ToUpper(x) == "ALL" || strings.ToUpper(x) == "FP16" {
- cppflags = append(cppflags, "-DENABLE_FP16_KERNELS")
- }
if strings.ToUpper(x) == "ALL" || strings.ToUpper(x) == "FP32" {
cppflags = append(cppflags, "-DENABLE_FP32_KERNELS")
}
@@ -57,8 +91,8 @@ func globalFlags(ctx android.BaseContext) []string {
}
}
- cppflags = append(cppflags, "-ARM_COMPUTE_CPU_ENABLED")
- cppflags = append(cppflags, "-ARM_COMPUTE_OPENCL_ENABLED")
+ cppflags = append(cppflags, "-DARM_COMPUTE_CPU_ENABLED")
+ cppflags = append(cppflags, "-DARM_COMPUTE_OPENCL_ENABLED")
return cppflags
}
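
For reference, a minimal Python sketch of what the isVersionAtLeast() helper above does; the letter/number pairs mirror the Go name_map and nothing here is part of the build:

    NAME_MAP = {"L": 5, "M": 6, "N": 7, "O": 8, "P": 9,
                "Q": 10, "R": 11, "S": 12, "T": 13, "U": 14}
    NAME_MAP.update({str(v): v for v in set(NAME_MAP.values())})

    def is_version_at_least(version_name, target_version):
        # Unknown version names compare as 0, i.e. "not at least", matching the Go fallback
        return NAME_MAP.get(version_name, 0) >= target_version

    print(is_version_at_least("T", 13))   # True  -> the integrated assembler stays enabled
    print(is_version_at_least("11", 13))  # False -> -no-integrated-as is appended
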
diff --git a/scripts/caffe_data_extractor.py b/scripts/caffe_data_extractor.py
deleted file mode 100755
index 47d24b265f..0000000000
--- a/scripts/caffe_data_extractor.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-"""Extracts trainable parameters from Caffe models and stores them in numpy arrays.
-Usage
- python caffe_data_extractor -m path_to_caffe_model_file -n path_to_caffe_netlist
-
-Saves each variable to a {variable_name}.npy binary file.
-
-Tested with Caffe 1.0 on Python 2.7
-"""
-import argparse
-import caffe
-import os
-import numpy as np
-
-
-if __name__ == "__main__":
- # Parse arguments
- parser = argparse.ArgumentParser('Extract Caffe net parameters')
- parser.add_argument('-m', dest='modelFile', type=str, required=True, help='Path to Caffe model file')
- parser.add_argument('-n', dest='netFile', type=str, required=True, help='Path to Caffe netlist')
- args = parser.parse_args()
-
- # Create Caffe Net
- net = caffe.Net(args.netFile, 1, weights=args.modelFile)
-
- # Read and dump blobs
- for name, blobs in net.params.iteritems():
- print('Name: {0}, Blobs: {1}'.format(name, len(blobs)))
- for i in range(len(blobs)):
- # Weights
- if i == 0:
- outname = name + "_w"
- # Bias
- elif i == 1:
- outname = name + "_b"
- else:
- continue
-
- varname = outname
- if os.path.sep in varname:
- varname = varname.replace(os.path.sep, '_')
- print("Renaming variable {0} to {1}".format(outname, varname))
- print("Saving variable {0} with shape {1} ...".format(varname, blobs[i].data.shape))
- # Dump as binary
- np.save(varname, blobs[i].data)
diff --git a/scripts/check_bad_style.sh b/scripts/check_bad_style.sh
index 0c0d83ac2f..91849266f4 100755
--- a/scripts/check_bad_style.sh
+++ b/scripts/check_bad_style.sh
@@ -29,7 +29,7 @@ then
exit -1
fi
-grep -HnRE --exclude-dir=assembly --exclude-dir=convolution --exclude-dir=arm_gemm --exclude-dir=arm_conv "\buint " --exclude-dir=cl_kernels --exclude-dir=cs_shaders $FILES | tee bad_style.log
+grep -HnRE --exclude-dir=assembly --exclude-dir=convolution --exclude-dir=arm_gemm --exclude-dir=dynamic_fusion --exclude-dir=arm_conv "\buint " --exclude-dir=cl_kernels --exclude-dir=cs_shaders $FILES | tee bad_style.log
if [[ $(cat bad_style.log | wc -l) > 0 ]]
then
echo ""
diff --git a/scripts/check_header_guards.py b/scripts/check_header_guards.py
new file mode 100644
index 0000000000..5c48b7501f
--- /dev/null
+++ b/scripts/check_header_guards.py
@@ -0,0 +1,208 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2023 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+import argparse
+from typing import List, Tuple
+import logging
+import re
+
+logger = logging.getLogger("check_header_guards")
+
+def find_code_boundaries(lines: List[str]) -> (int, int):
+ inside_comment : bool = False
+
+ start = len(lines)
+ end = -1
+ line_num = 0
+ for line in lines:
+ stripped_line : str = line.strip()
+ if stripped_line.startswith("/*"): # block comment start
+ inside_comment = True
+
+ if not inside_comment and not stripped_line.startswith("//") and stripped_line != "":
+ start = min(line_num, start)
+ end = line_num
+
+ if inside_comment and stripped_line.endswith("*/"):
+ inside_comment = False
+
+ line_num += 1
+
+ return start, end
+
+
+def is_define(line: str) -> bool:
+ return line.strip().startswith("#define")
+
+def is_endif(line: str) -> bool:
+ return line.strip().startswith("#endif")
+
+def is_ifndef(line: str) -> bool:
+ return line.strip().startswith("#ifndef")
+
+# Strips // and /* */ comments from the given line
+def strip_comments(line: str) -> str:
+ line = re.sub(r"/\*.*\*/", "", line)
+ line = re.sub(r"//.*", "", line)
+ return line.strip()
+
+# If the line
+#  1) starts with #ifndef, and the guard name after it
+#  2) is all uppercase
+#  3) does not start with a double underscore (__)
+# then it "looks" like a header guard
+def looks_like_header_guard(line: str) -> bool:
+ sline = line.strip()
+ guard_candidate = strip_comments(sline[len("#ifndef"):])
+
+ return is_ifndef(sline) and not guard_candidate.startswith("__") and guard_candidate.isupper()
+
+
+def fix_header_guard(lines: List[str], expected_header_guard: str, comment_style: str) -> Tuple[List[str], bool]:
+ start_line, next_line, last_line = "", "", ""
+ start_index, last_index = find_code_boundaries(lines)
+ guards_updated: bool = True
+
+ if start_index < len(lines):
+ # if not, the file is full of comments
+ start_line = lines[start_index]
+
+ if start_index + 1 < len(lines):
+ # if not, the file has only one line of code
+ next_line = lines[start_index + 1]
+
+ if last_index < len(lines) and last_index > start_index + 1:
+ # if not, either the file is full of comments or it has fewer than three code lines
+ last_line = lines[last_index]
+
+ expected_start_line = f"#ifndef {expected_header_guard}\n"
+ expected_next_line = f"#define {expected_header_guard}\n"
+
+ if comment_style == 'double_slash':
+ expected_last_line = f"#endif // {expected_header_guard}\n"
+ elif comment_style == 'slash_asterix':
+ expected_last_line = f"#endif /* {expected_header_guard} */\n"
+
+ empty_line = "\n"
+
+ if looks_like_header_guard(start_line) and is_define(next_line) and is_endif(last_line):
+ # modify the current header guard if necessary
+ lines = lines[:start_index] + [expected_start_line, expected_next_line] + \
+ lines[start_index+2:last_index] + [expected_last_line] + lines[last_index+1:]
+
+ guards_updated = (start_line != expected_start_line) or (next_line != expected_next_line) \
+ or (last_line != expected_last_line)
+ else:
+ # header guard could not be detected, add header guards
+ lines = lines[:start_index] + [empty_line, expected_start_line, expected_next_line] + \
+ [empty_line] + lines[start_index:] + [empty_line, expected_last_line]
+
+
+ return lines, guards_updated
+
+
+def find_expected_header_guard(filepath: str, prefix: str, add_extension: str, drop_outermost_subdir: str) -> str:
+ if drop_outermost_subdir:
+ arr : List[str] = filepath.split("/")
+ arr = arr[min(1, len(arr)-1):]
+ filepath = "/".join(arr)
+
+ if not add_extension:
+ filepath = ".".join(filepath.split(".")[:-1])
+
+ guard = filepath.replace("/", "_").replace(".", "_").upper() # snake case full path
+ return prefix + "_" + guard
+
+
+def skip_file(filepath: str, extensions: List[str], exclude: List[str], include: List[str]) -> bool:
+ extension = filepath.split(".")[-1]
+
+ if extension.lower() not in extensions:
+ return True
+
+ if exclude and any([filepath.startswith(exc) for exc in exclude]):
+ print(exclude)
+ return True
+
+ if include:
+ return not any([filepath.startswith(inc) for inc in include])
+
+ return False
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ description="Header Guard Checker. It adds full path snake case header guards with or without extension.",
+ )
+
+ parser.add_argument("files", type=str, nargs="+", help="Files to check the header guards")
+ parser.add_argument("--extensions", type=str, help="Comma separated list of extensions to run the checks. \
+ If the input file does not have any of the extensions, it'll be skipped", required=True)
+ parser.add_argument("--comment_style", choices=['double_slash', 'slash_asterix'], required=True)
+ parser.add_argument("--exclude", type=str, help="Comma separated list of paths to exclude from header guard checks", default="")
+ parser.add_argument("--include", type=str, help="Comma separated list of paths to include. Defaults to empty string, \
+ which means all the paths are included", default="")
+ parser.add_argument("--prefix", help="Prefix to apply to header guards", required=True)
+ parser.add_argument("--add_extension", action="store_true", help="If true, it adds the file extension to the end of the guard")
+ parser.add_argument("--drop_outermost_subdir", action="store_true", help="If true, it'll not use the outermost folder in the path. \
+ This is intended for using in subdirs with different rules")
+
+ args = parser.parse_args()
+
+ files = args.files
+ extensions = args.extensions.split(",")
+ exclude = args.exclude.split(",") if args.exclude != '' else []
+ include = args.include.split(",") if args.include != '' else []
+ prefix = args.prefix
+ add_extension = args.add_extension
+ drop_outermost_subdir = args.drop_outermost_subdir
+ comment_style = args.comment_style
+
+ logging_level = logging.INFO
+ logging.basicConfig(level=logging_level)
+
+ retval = 0
+ for file in files:
+ if skip_file(file, extensions, exclude, include):
+ logger.info(f"File {file} is SKIPPED")
+ continue
+
+ expected_header_guard : str = find_expected_header_guard(file, prefix, add_extension, drop_outermost_subdir)
+
+ with open(file, "r") as fd:
+ lines: List = fd.readlines()
+
+ new_lines, guards_updated = fix_header_guard(lines, expected_header_guard, comment_style)
+
+ with open(file, "w") as fd:
+ fd.writelines([f"{line}" for line in new_lines])
+
+ if guards_updated:
+ logger.info("File has been modified")
+ retval = 1
+
+ exit(retval)
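
A minimal sketch of the guard this checker computes, assuming a hypothetical prefix "ACL" and --add_extension left off (both are illustrative choices, not defaults taken from the script):

    filepath = "src/core/Helpers.h"
    prefix = "ACL"
    stem = ".".join(filepath.split(".")[:-1])     # drop the extension, as the script does
    guard = prefix + "_" + stem.replace("/", "_").replace(".", "_").upper()
    print(guard)                                  # ACL_SRC_CORE_HELPERS

With --add_extension the extension is kept, giving ACL_SRC_CORE_HELPERS_H instead.
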
diff --git a/scripts/clang-tidy.h b/scripts/clang-tidy.h
index b3705122c6..24e4b15c6f 100644
--- a/scripts/clang-tidy.h
+++ b/scripts/clang-tidy.h
@@ -1,5 +1,12 @@
#include <arm_neon.h>
+#if __arm__
+inline uint32x4_t vpaddq_u32(uint32x4_t, uint32x4_t)
+{
+ return vdupq_n_u32(0);
+}
+#endif
+
inline float16x4_t vrsqrts_f16 (float16x4_t, float16x4_t)
{
return vdup_n_f16(0);
diff --git a/scripts/clang_tidy_rules.py b/scripts/clang_tidy_rules.py
index 8ab7c13a7c..f244017dbd 100755
--- a/scripts/clang_tidy_rules.py
+++ b/scripts/clang_tidy_rules.py
@@ -1,10 +1,35 @@
#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2017-2024 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
import os
import re
import sys
def get_list_includes():
- return "src/core/cpu/kernels/assembly " \
+ return "compute_kernel_writer/include " \
+ "src/cpu/kernels/assembly " \
"src/core/NEON/kernels/assembly " \
"src/core/NEON/kernels/convolution/winograd " \
"include/linux include " \
@@ -15,8 +40,10 @@ def get_list_flags( filename, arch):
flags = ["-std=c++14"]
flags.append("-DARM_COMPUTE_CPP_SCHEDULER=1")
flags.append("-DARM_COMPUTE_CL")
+ flags.append("-DARM_COMPUTE_OPENCL_ENABLED")
if arch == "aarch64":
flags.append("-DARM_COMPUTE_AARCH64_V8_2")
+
return flags
def filter_files( list_files ):
@@ -45,6 +72,9 @@ def filter_clang_tidy_lines( lines ):
if "/arm_gemm/" in line:
continue
+ if "compute_kernel_writer/" in line:
+ continue
+
if "/convolution/" in line:
continue
diff --git a/scripts/ensure_single_eol.py b/scripts/ensure_single_eol.py
new file mode 100755
index 0000000000..0eb105e091
--- /dev/null
+++ b/scripts/ensure_single_eol.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2023 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""
+This script ensures a file ends with exactly one end-of-line character
+"""
+
+import argparse
+import pathlib as pl
+import os
+
+
+def main(args):
+ f_p = pl.Path(args.file)
+ with open(f_p, "r") as f:
+ lines = f.read()
+ lines = lines.rstrip(os.linesep)
+ lines += os.linesep
+ with open(f_p, "w") as f:
+ f.write(lines)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument("file")
+ args = parser.parse_args()
+ main(args)
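
Sketch of the effect of ensure_single_eol.py, assuming os.linesep is "\n" on the host:

    # "int x;\n\n\n"  ->  "int x;\n"   trailing newlines collapsed to exactly one
    # "int x;"        ->  "int x;\n"   a missing final newline is added
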
diff --git a/scripts/format_code.py b/scripts/format_code.py
new file mode 100755
index 0000000000..8bfb3f5601
--- /dev/null
+++ b/scripts/format_code.py
@@ -0,0 +1,426 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2023-2024 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+import argparse
+import datetime
+import difflib
+import filecmp
+import logging
+import os
+import re
+import subprocess
+import sys
+
+from modules.Shell import Shell
+
+logger = logging.getLogger("format_code")
+
+# List of directories to exclude
+exceptions = [
+ "src/core/NEON/kernels/assembly/gemm",
+ "src/core/NEON/kernels/assembly/arm",
+ "/winograd/",
+ "/convolution/",
+ "/arm_gemm/",
+ "/arm_conv/",
+ "SConscript",
+ "SConstruct"
+]
+
+def adjust_copyright_year(copyright_years, curr_year):
+ ret_copyright_year = str()
+ # Read last year in the Copyright
+ last_year = int(copyright_years[-4:])
+ if last_year == curr_year:
+ ret_copyright_year = copyright_years
+ elif last_year == (curr_year - 1):
+ # Create range if latest year on the copyright is the previous
+ if len(copyright_years) > 4 and copyright_years[-5] == "-":
+ # Range already exists, update year to current
+ ret_copyright_year = copyright_years[:-5] + "-" + str(curr_year)
+ else:
+ # Create a new range
+ ret_copyright_year = copyright_years + "-" + str(curr_year)
+ else:
+ ret_copyright_year = copyright_years + ", " + str(curr_year)
+ return ret_copyright_year
+
+def check_copyright( filename ):
+ f = open(filename, "r")
+ content = f.readlines()
+ f.close()
+ f = open(filename, "w")
+ year = datetime.datetime.now().year
+ ref = open("scripts/copyright_mit.txt","r").readlines()
+
+ # Need to handle python files separately
+ if("SConstruct" in filename or "SConscript" in filename):
+ start = 2
+ if("SConscript" in filename):
+ start = 3
+ m = re.match(r"(# Copyright \(c\) )(.*\d{4})( [Arm|ARM].*)", content[start])
+ line = m.group(1)
+
+ if m.group(2): # Is there a year already?
+ # Yes: adjust accordingly
+ line += adjust_copyright_year(m.group(2), year)
+ else:
+ # No: add current year
+ line += str(year)
+ line += m.group(3).replace("ARM", "Arm")
+ if("SConscript" in filename):
+ f.write('#!/usr/bin/python\n')
+
+ f.write('# -*- coding: utf-8 -*-\n\n')
+ f.write(line+"\n")
+ # Copy the rest of the file's content:
+ f.write("".join(content[start + 1:]))
+ f.close()
+
+ return
+
+ # This only works until year 9999
+ m = re.match(r"(.*Copyright \(c\) )(.*\d{4})( [Arm|ARM].*)", content[1])
+ start =len(ref)+2
+ if content[0] != "/*\n" or not m:
+ start = 0
+ f.write("/*\n * Copyright (c) %d Arm Limited.\n" % year)
+ else:
+ logger.debug("Found Copyright start")
+ logger.debug("\n\t".join([ g or "" for g in m.groups()]))
+ line = m.group(1)
+
+ if m.group(2): # Is there a year already?
+ # Yes: adjust accordingly
+ line += adjust_copyright_year(m.group(2), year)
+ else:
+ # No: add current year
+ line += str(year)
+ line += m.group(3).replace("ARM", "Arm")
+ f.write("/*\n"+line+"\n")
+ logger.debug(line)
+ # Write out the rest of the Copyright header:
+ for i in range(1, len(ref)):
+ line = ref[i]
+ f.write(" *")
+ if line.rstrip() != "":
+ f.write(" %s" % line)
+ else:
+ f.write("\n")
+ f.write(" */\n")
+ # Copy the rest of the file's content:
+ f.write("".join(content[start:]))
+ f.close()
+
+def check_license(filename):
+ """
+ Check that the license file is up-to-date
+ """
+ f = open(filename, "r")
+ content = f.readlines()
+ f.close()
+
+ f = open(filename, "w")
+ f.write("".join(content[:2]))
+
+ year = datetime.datetime.now().year
+ # This only works until year 9999
+ m = re.match(r"(.*Copyright \(c\) )(.*\d{4})( [Arm|ARM].*)", content[2])
+
+ if not m:
+ f.write("Copyright (c) {} Arm Limited\n".format(year))
+ else:
+ updated_year = adjust_copyright_year(m.group(2), year)
+ f.write("Copyright (c) {} Arm Limited\n".format(updated_year))
+
+ # Copy the rest of the file's content:
+ f.write("".join(content[3:]))
+ f.close()
+
+
+class OtherChecksRun:
+ def __init__(self, folder, error_diff=False, strategy="all"):
+ self.folder = folder
+ self.error_diff=error_diff
+ self.strategy = strategy
+
+ def error_on_diff(self, msg):
+ retval = 0
+ if self.error_diff:
+ diff = self.shell.run_single_to_str("git diff")
+ if len(diff) > 0:
+ retval = -1
+ logger.error(diff)
+ logger.error("\n"+msg)
+ return retval
+
+ def run(self):
+ retval = 0
+ self.shell = Shell()
+ self.shell.save_cwd()
+ this_dir = os.path.dirname(__file__)
+ self.shell.cd(self.folder)
+ self.shell.prepend_env("PATH","%s/../bin" % this_dir)
+
+ to_check = ""
+ if self.strategy != "all":
+ to_check, skip_copyright = FormatCodeRun.get_files(self.folder, self.strategy)
+ #FIXME: Exclude shaders!
+
+ logger.info("Running ./scripts/format_doxygen.py")
+ logger.debug(self.shell.run_single_to_str("./scripts/format_doxygen.py %s" % " ".join(to_check)))
+ retval = self.error_on_diff("Doxygen comments badly formatted (check above diff output for more details) try to run ./scripts/format_doxygen.py on your patch and resubmit")
+ if retval == 0:
+ logger.info("Running ./scripts/include_functions_kernels.py")
+ logger.debug(self.shell.run_single_to_str("python ./scripts/include_functions_kernels.py"))
+ retval = self.error_on_diff("Some kernels or functions are not included in their corresponding master header (check above diff output to see which includes are missing)")
+ if retval == 0:
+ try:
+ logger.info("Running ./scripts/check_bad_style.sh")
+ logger.debug(self.shell.run_single_to_str("./scripts/check_bad_style.sh"))
+ #logger.debug(self.shell.run_single_to_str("./scripts/check_bad_style.sh %s" % " ".join(to_check)))
+ except subprocess.CalledProcessError as e:
+ logger.error("Command %s returned:\n%s" % (e.cmd, e.output))
+ retval -= 1
+
+ if retval != 0:
+ raise Exception("format-code failed with error code %d" % retval)
+
+class FormatCodeRun:
+ @staticmethod
+ def get_files(folder, strategy="git-head"):
+ shell = Shell()
+ shell.cd(folder)
+ skip_copyright = False
+ if strategy == "git-head":
+ cmd = "git diff-tree --no-commit-id --name-status -r HEAD | grep \"^[AMRT]\" | cut -f 2"
+ elif strategy == "git-diff":
+ cmd = "git diff --name-status --cached -r HEAD | grep \"^[AMRT]\" | rev | cut -f 1 | rev"
+ else:
+ cmd = "git ls-tree -r HEAD --name-only"
+ # Skip copyright checks when running on all files because we don't know when they were last modified
+ # Therefore we can't tell if their copyright dates are correct
+ skip_copyright = True
+
+ grep_folder = "grep -e \"^\\(arm_compute\\|src\\|examples\\|tests\\|utils\\|support\\)/\""
+ grep_extension = "grep -e \"\\.\\(cpp\\|h\\|hh\\|inl\\|cl\\|cs\\|hpp\\)$\""
+ list_files = shell.run_single_to_str(cmd+" | { "+ grep_folder+" | "+grep_extension + " || true; }")
+ to_check = [ f for f in list_files.split("\n") if len(f) > 0]
+
+ # Check for scons files as they are excluded from the above list
+ list_files = shell.run_single_to_str(cmd+" | { grep -e \"SC\" || true; }")
+ to_check += [ f for f in list_files.split("\n") if len(f) > 0]
+
+ return (to_check, skip_copyright)
+
+ def __init__(self, files, folder, error_diff=False, skip_copyright=False):
+ self.files = files
+ self.folder = folder
+ self.skip_copyright = skip_copyright
+ self.error_diff=error_diff
+
+ def error_on_diff(self, msg):
+ retval = 0
+ if self.error_diff:
+ diff = self.shell.run_single_to_str("git diff")
+ if len(diff) > 0:
+ retval = -1
+ logger.error(diff)
+ logger.error("\n"+msg)
+ return retval
+
+ def run(self):
+ if len(self.files) < 1:
+ logger.debug("No file: early exit")
+ retval = 0
+ self.shell = Shell()
+ self.shell.save_cwd()
+ this_dir = os.path.dirname(__file__)
+ try:
+ self.shell.cd(self.folder)
+ self.shell.prepend_env("PATH","%s/../bin" % this_dir)
+
+ for f in self.files:
+ if not self.skip_copyright:
+ check_copyright(f)
+
+ skip_this_file = False
+ for e in exceptions:
+ if e in f:
+ logger.warning("Skipping '%s' file: %s" % (e,f))
+ skip_this_file = True
+ break
+ if skip_this_file:
+ continue
+
+ logger.info("Formatting %s" % f)
+
+ check_license("LICENSE")
+
+ except subprocess.CalledProcessError as e:
+ retval = -1
+ logger.error(e)
+ logger.error("OUTPUT= %s" % e.output)
+
+ retval += self.error_on_diff("See above for clang-tidy errors")
+
+ if retval != 0:
+ raise Exception("format-code failed with error code %d" % retval)
+
+class GenerateAndroidBP:
+ def __init__(self, folder):
+ self.folder = folder
+ self.bp_output_file = "Generated_Android.bp"
+
+ def run(self):
+ retval = 0
+ self.shell = Shell()
+ self.shell.save_cwd()
+ this_dir = os.path.dirname(__file__)
+
+ logger.debug("Running Android.bp check")
+ try:
+ self.shell.cd(self.folder)
+ cmd = "%s/generate_android_bp.py --folder %s --output_file %s" % (this_dir, self.folder, self.bp_output_file)
+ output = self.shell.run_single_to_str(cmd)
+ if len(output) > 0:
+ logger.info(output)
+ except subprocess.CalledProcessError as e:
+ retval = -1
+ logger.error(e)
+ logger.error("OUTPUT= %s" % e.output)
+
+ # Compare the generated file with the one in the review
+ if not filecmp.cmp(self.bp_output_file, self.folder + "/Android.bp"):
+ is_mismatched = True
+
+ with open(self.bp_output_file, 'r') as generated_file:
+ with open(self.folder + "/Android.bp", 'r') as review_file:
+ diff = list(difflib.unified_diff(generated_file.readlines(), review_file.readlines(),
+ fromfile='Generated_Android.bp', tofile='Android.bp'))
+
+ # If the only mismatch in Android.bp file is the copyright year,
+ # the content of the file is considered unchanged and we don't need to update
+ # the copyright year. This will resolve the issue that emerges every new year.
+ num_added_lines = 0
+ num_removed_lines = 0
+ last_added_line = ""
+ last_removed_line = ""
+ expect_add_line = False
+
+ for line in diff:
+ if line.startswith("-") and not line.startswith("---"):
+ num_removed_lines += 1
+ if num_removed_lines > 1:
+ break
+ last_removed_line = line
+ expect_add_line = True
+ elif line.startswith("+") and not line.startswith("+++"):
+ num_added_lines += 1
+ if num_added_lines > 1:
+ break
+ if expect_add_line:
+ last_added_line = line
+ else:
+ expect_add_line = False
+
+ if num_added_lines == 1 and num_removed_lines == 1:
+ re_copyright = re.compile("^(?:\+|\-)// Copyright © ([0-9]+)\-([0-9]+) Arm Ltd. All rights reserved.\n$")
+ generated_matches = re_copyright.search(last_removed_line)
+ review_matches = re_copyright.search(last_added_line)
+
+ if generated_matches is not None and review_matches is not None:
+ if generated_matches.group(1) == review_matches.group(1) and \
+ int(generated_matches.group(2)) > int(review_matches.group(2)):
+ is_mismatched = False
+
+ if is_mismatched:
+ logger.error("Lines with '-' need to be added to Android.bp")
+ logger.error("Lines with '+' need to be removed from Android.bp")
+
+ for line in diff:
+ logger.error(line.rstrip())
+ if is_mismatched:
+ raise Exception("Android bp file is not updated")
+
+ if retval != 0:
+ raise Exception("generate Android bp file failed with error code %d" % retval)
+
+def run_fix_code_formatting( files="git-head", folder=".", num_threads=1, error_on_diff=True):
+ try:
+ retval = 0
+
+ # Generate the Android.bp file and test it
+ gen_android_bp = GenerateAndroidBP(folder)
+ gen_android_bp.run()
+
+ to_check, skip_copyright = FormatCodeRun.get_files(folder, files)
+ other_checks = OtherChecksRun(folder,error_on_diff, files)
+ other_checks.run()
+
+ logger.debug(to_check)
+ num_files = len(to_check)
+ per_thread = max( num_files / num_threads,1)
+ start=0
+ logger.info("Files to format:\n\t%s" % "\n\t".join(to_check))
+
+ for i in range(num_threads):
+ if i == num_threads -1:
+ end = num_files
+ else:
+ end= min(start+per_thread, num_files)
+ sub = to_check[start:end]
+ logger.debug("[%d] [%d,%d] %s" % (i, start, end, sub))
+ start = end
+ format_code_run = FormatCodeRun(sub, folder, skip_copyright=skip_copyright)
+ format_code_run.run()
+
+ return retval
+ except Exception as e:
+ logger.error("Exception caught in run_fix_code_formatting: %s" % e)
+ return -1
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ description="Build & run pre-commit tests",
+ )
+
+ file_sources=["git-diff","git-head","all"]
+ parser.add_argument("-D", "--debug", action='store_true', help="Enable script debugging output")
+ parser.add_argument("--error_on_diff", action='store_true', help="Show diff on error and stop")
+ parser.add_argument("--files", nargs='?', metavar="source", choices=file_sources, help="Which files to run fix_code_formatting on, choices=%s" % file_sources, default="git-head")
+ parser.add_argument("--folder", metavar="path", help="Folder in which to run fix_code_formatting", default=".")
+
+ args = parser.parse_args()
+
+ logging_level = logging.INFO
+ if args.debug:
+ logging_level = logging.DEBUG
+
+ logging.basicConfig(level=logging_level)
+
+ logger.debug("Arguments passed: %s" % str(args.__dict__))
+
+ exit(run_fix_code_formatting(args.files, args.folder, 1, error_on_diff=args.error_on_diff))
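
Expected behavior of the adjust_copyright_year() helper above, assuming the current year is 2024:

    # "2024"       -> "2024"         already current, unchanged
    # "2023"       -> "2023-2024"    previous year, a new range is started
    # "2019-2023"  -> "2019-2024"    existing range, its end moves to the current year
    # "2020"       -> "2020, 2024"   older year, the current year is appended
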
diff --git a/scripts/format_doxygen.py b/scripts/format_doxygen.py
index 5882958fc5..8ac5e630b9 100755
--- a/scripts/format_doxygen.py
+++ b/scripts/format_doxygen.py
@@ -81,7 +81,7 @@ def process_comment(fd, comment, first_param, last_param):
if __name__ == "__main__":
n_file=0
- if len(sys.argv) == 1:
+ if len(sys.argv) == 2 and sys.argv[1] == '--all':
paths = []
for top_level in ["./arm_compute", "./src", "./examples", "./tests", "./utils", "./framework", "./support"]:
diff --git a/scripts/generate_android_bp.py b/scripts/generate_android_bp.py
new file mode 100755
index 0000000000..d5b268f522
--- /dev/null
+++ b/scripts/generate_android_bp.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2023-2024 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+import argparse
+import os
+from jinja2 import Template
+import datetime
+
+# Paths to exclude
+excluded_paths = ["build",
+ "compute_kernel_writer/",
+ "src/dynamic_fusion/runtime/gpu/cl/ckw_driver/",
+ "src/dynamic_fusion/sketch/gpu/ckw_driver/",
+ "docs/",
+ "documentation/",
+ "examples/",
+ "opencl-1.2-stubs/",
+ "release_repository/",
+ "opengles-3.1-stubs/",
+ "scripts/",
+ "tests/",
+ "/GLES_COMPUTE/",
+ "/graph/",
+ "/sve/",
+ "/SVE/",
+ "/sve2/",
+ "/SVE2/",
+ "/sme/",
+ "/sme2/",
+ ]
+
+excluded_files = ["TracePoint.cpp"]
+
+# Android bp template to render
+year = datetime.datetime.now().year
+
+bp_tm = Template(
+"""//
+// Copyright © 2020-""" + str(year) + """ Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+// OpenCL sources are NOT required by ArmNN or its Android NNAPI driver and are used for CI purposes only.
+opencl_srcs = [
+ {% for cl_src in cl_srcs -%}
+ "{{ cl_src }}",
+ {% endfor %}
+]
+
+bootstrap_go_package {
+ name: "arm_compute_library_nn_driver",
+ pkgPath: "arm_compute_library_nn_driver",
+ deps: [
+ "blueprint",
+ "blueprint-pathtools",
+ "blueprint-proptools",
+ "soong",
+ "soong-android",
+ "soong-cc",
+ ],
+ srcs: [
+ "scripts/arm_compute_library_nn_driver.go",
+ ],
+ pluginFor: [ "soong_build" ],
+}
+
+arm_compute_library_defaults {
+ name: "acl-default-cppflags",
+ cppflags: [
+ "-std=c++14",
+ "-fexceptions",
+ "-DBOOST_NO_AUTO_PTR",
+ "-DEMBEDDED_KERNELS",
+ "-DARM_COMPUTE_ASSERTS_ENABLED",
+ "-DARM_COMPUTE_CPP_SCHEDULER",
+ "-DENABLE_NEON",
+ "-DARM_COMPUTE_ENABLE_NEON",
+ "-Wno-unused-parameter",
+ "-DNO_DOT_IN_TOOLCHAIN",
+ "-Wno-implicit-fallthrough",
+ "-fPIC"
+ ],
+ rtti: true,
+}
+
+cc_library_static {
+ name: "arm_compute_library",
+ defaults: ["acl-default-cppflags"],
+ proprietary: true,
+ local_include_dirs: ["build/android-arm64v8a/src/core",
+ "build/android-arm64v8a/src/core/CL",
+ "compute_kernel_writer/include",
+ "src/core/common",
+ "src/core/helpers",
+ "src/core/NEON/kernels/arm_gemm",
+ "src/core/NEON/kernels/assembly",
+ "src/core/NEON/kernels/convolution/common",
+ "src/core/NEON/kernels/convolution/winograd",
+ "src/cpu/kernels/assembly"],
+ export_include_dirs: [".", "./include"],
+ srcs: [
+ {% for src in srcs -%}
+ "{{ src }}",
+ {% endfor %}
+ ],
+ arch: {
+ arm: {
+ srcs: [
+ {% for arm_src in arm_srcs -%}
+ "{{ arm_src }}",
+ {% endfor %}
+ ],
+ },
+ arm64: {
+ srcs: [
+ {% for arm64_src in arm64_srcs -%}
+ "{{ arm64_src }}",
+ {% endfor %}
+ ],
+ },
+ },
+ rtti: true,
+}
+""")
+
+
+def generate_bp_file(cpp_files, opencl_files):
+ arm_files = [f for f in cpp_files if "a32_" in f]
+ arm64_files = [f for f in cpp_files if any(a64 in f for a64 in ["a64_", "sve_", 'sme_', 'sme2_'])]
+ gen_files = [x for x in cpp_files if x not in arm_files + arm64_files]
+
+ arm_files.sort()
+ arm64_files.sort()
+ gen_files.sort()
+ opencl_files.sort()
+
+ bp_file = bp_tm.render(srcs=gen_files,
+ arm_srcs=arm_files,
+ arm64_srcs=arm64_files,
+ cl_srcs=opencl_files)
+ return bp_file
+
+
+def list_all_files(repo_path):
+ """ Gets the list of files to include to the Android.bp
+
+ :param repo_path: Path of the repository
+ :return: The filtered list of useful files
+ """
+ if not repo_path.endswith('/'):
+ repo_path = repo_path + "/"
+
+ # Get cpp files
+ cpp_files = []
+ cl_files = []
+ for path, subdirs, files in os.walk(repo_path):
+ for file in files:
+ if file.endswith(".cpp"):
+ cpp_files.append(os.path.join(path, file))
+ elif file.endswith(".cl"):
+ cl_files.append(os.path.join(path, file))
+ # Include CL headers
+ if "src/core/CL/cl_kernels" in path and file.endswith(".h"):
+ cl_files.append(os.path.join(path, file))
+ # Filter out unused cpp files
+ filtered_cpp_files = []
+ for cpp_file in cpp_files:
+ if any(ep in cpp_file for ep in excluded_paths) or any(ef in cpp_file for ef in excluded_files):
+ continue
+ filtered_cpp_files.append(cpp_file.replace(repo_path, ""))
+ # Filter out unused cl files
+ filtered_cl_files = []
+ for cl_file in cl_files:
+ if any(ep in cl_file for ep in excluded_paths):
+ continue
+ filtered_cl_files.append(cl_file.replace(repo_path, ""))
+
+ return filtered_cpp_files, filtered_cl_files
+
+
+if __name__ == "__main__":
+ # Parse arguments
+ parser = argparse.ArgumentParser('Generate Android.bp file for ComputeLibrary')
+ parser.add_argument('--folder', default=".", metavar="folder", dest='folder', type=str, required=False, help='Compute Library source path')
+ parser.add_argument('--output_file', metavar="output_file", default='Android.bp', type=str, required=False, help='Specify Android bp output file')
+ args = parser.parse_args()
+
+ cpp_files, opencl_files = list_all_files(args.folder)
+ bp_file = generate_bp_file(cpp_files, opencl_files)
+
+ with open(args.output_file, 'w') as f:
+ f.write(bp_file)
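
Illustration of how generate_bp_file() above buckets sources into the arch-specific lists (the file names below are made up):

    files = ["cpu/kernels/a32_gemm.cpp", "cpu/kernels/a64_gemm.cpp",
             "cpu/kernels/sve_add.cpp", "runtime/Scheduler.cpp"]
    arm_files   = [f for f in files if "a32_" in f]
    arm64_files = [f for f in files if any(t in f for t in ("a64_", "sve_", "sme_", "sme2_"))]
    gen_files   = [f for f in files if f not in arm_files + arm64_files]
    # arm_files   -> ['cpu/kernels/a32_gemm.cpp']                              (arm block)
    # arm64_files -> ['cpu/kernels/a64_gemm.cpp', 'cpu/kernels/sve_add.cpp']   (arm64 block)
    # gen_files   -> ['runtime/Scheduler.cpp']                                 (common srcs)
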
diff --git a/scripts/generate_build_files.py b/scripts/generate_build_files.py
new file mode 100644
index 0000000000..f88cf1af44
--- /dev/null
+++ b/scripts/generate_build_files.py
@@ -0,0 +1,294 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2023-2024 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Generates build files for either bazel or cmake experimental builds using filelist.json
+Usage
+ python scripts/generate_build_files.py --bazel
+ python scripts/generate_build_files.py --cmake
+
+Writes generated file to the bazel BUILD file located under src/ if using --bazel flag.
+Writes generated file to the CMake CMakeLists.txt file located under src/ if using --cmake flag.
+"""
+
+import argparse
+import json
+import glob
+
+
+def get_operator_backend_files(filelist, operators, backend='', techs=[], attrs=[]):
+ files = {"common": []}
+
+ # Early return if filelist is empty
+ if backend not in filelist:
+ return files
+
+ # Iterate over operators and create the file lists to compile
+ for operator in operators:
+ if operator in filelist[backend]['operators']:
+ files['common'] += filelist[backend]['operators'][operator]["files"]["common"]
+ for tech in techs:
+ if tech in filelist[backend]['operators'][operator]["files"]:
+ # Add tech as a key to dictionary if not there
+ if tech not in files:
+ files[tech] = []
+
+ # Add tech files to the tech file list
+ tech_files = filelist[backend]['operators'][operator]["files"][tech]
+ files[tech] += tech_files.get('common', [])
+ for attr in attrs:
+ files[tech] += tech_files.get(attr, [])
+
+ # Remove duplicates if they exist
+ return {k: list(set(v)) for k, v in files.items()}
+
+
+def collect_operators(filelist, operators, backend=''):
+ ops = set()
+ for operator in operators:
+ if operator in filelist[backend]['operators']:
+ ops.add(operator)
+ if 'deps' in filelist[backend]['operators'][operator]:
+ ops.update(filelist[backend]['operators'][operator]['deps'])
+ else:
+ print("Operator {0} is unsupported on {1} backend!".format(
+ operator, backend))
+
+ return ops
+
+
+def resolve_operator_dependencies(filelist, operators, backend=''):
+ resolved_operators = collect_operators(filelist, operators, backend)
+
+ are_ops_resolved = False
+ while not are_ops_resolved:
+ resolution_pass = collect_operators(
+ filelist, resolved_operators, backend)
+ if len(resolution_pass) != len(resolved_operators):
+ resolved_operators.update(resolution_pass)
+ else:
+ are_ops_resolved = True
+
+ return resolved_operators
+
+def get_template_header():
+ return """# Copyright (c) 2023-2024 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE."""
+
+def build_from_template_bazel(srcs_graph, srcs_sve, srcs_sve2, srcs_core):
+
+ line_separator = '",\n\t"'
+
+ template = f"""{get_template_header()}
+
+filegroup(
+ name = "arm_compute_graph_srcs",
+ srcs = ["{line_separator.join(srcs_graph)}"] +
+ glob(["**/*.h",
+ "**/*.hpp",
+ "**/*.inl"]),
+ visibility = ["//visibility:public"]
+)
+
+filegroup(
+ name = "arm_compute_sve2_srcs",
+ srcs = ["{line_separator.join(srcs_sve2)}"] +
+ glob(["**/*.h",
+ "**/*.hpp",
+ "**/*.inl"]),
+ visibility = ["//visibility:public"]
+)
+
+filegroup(
+ name = "arm_compute_sve_srcs",
+ srcs = ["{line_separator.join(srcs_sve)}"] +
+ glob(["**/*.h",
+ "**/*.hpp",
+ "**/*.inl"]),
+ visibility = ["//visibility:public"]
+)
+
+filegroup(
+ name = "arm_compute_srcs",
+ srcs = ["{line_separator.join(srcs_core)}"] +
+ glob(["**/*.h",
+ "**/*.hpp",
+ "**/*.inl"]),
+ visibility = ["//visibility:public"]
+)
+"""
+
+ return template
+
+
+def build_from_template_cmake(srcs_graph, srcs_sve, srcs_sve2, srcs_core):
+
+ line_separator = '\n\t'
+
+ template = f"""{get_template_header()}
+
+target_sources(
+ arm_compute_graph
+ PRIVATE
+ {line_separator.join(srcs_graph)}
+)
+
+target_sources(
+ arm_compute_sve
+ PRIVATE
+ {line_separator.join(srcs_sve)}
+)
+
+target_sources(
+ arm_compute_sve2
+ PRIVATE
+ {line_separator.join(srcs_sve2)}
+)
+
+target_sources(
+ arm_compute
+ PRIVATE
+ {line_separator.join(srcs_core)}
+)"""
+ return template
+
+
+def gather_sources():
+
+ # Source file list
+ with open("filelist.json") as fp:
+ filelist = json.load(fp)
+
+ # Common backend files
+ lib_files = filelist['common']
+
+ # Logging files
+ lib_files += filelist['logging']
+
+ # C API files
+ lib_files += filelist['c_api']['common']
+ lib_files += filelist['c_api']['operators']
+
+ # Scheduler infrastructure
+ lib_files += filelist['scheduler']['single']
+ # Add both cppthreads and omp sources for now
+ lib_files += filelist['scheduler']['threads']
+ lib_files += filelist['scheduler']['omp']
+
+ # Graph files
+ graph_files = glob.glob('src/graph/*.cpp')
+ graph_files += glob.glob('src/graph/*/*.cpp')
+
+ lib_files_sve = []
+ lib_files_sve2 = []
+
+ # -------------------------------------
+ # NEON files
+ lib_files += filelist['cpu']['common']
+ simd = ['neon', 'sve', 'sve2']
+
+ # Get attributes
+ data_types = ["qasymm8", "qasymm8_signed", "qsymm16",
+ "fp16", "fp32", "integer"]
+ data_layouts = ["nhwc", "nchw"]
+ fixed_format_kernels = ["fixed_format_kernels"]
+ attrs = data_types + data_layouts + \
+ fixed_format_kernels + ["estate64"]
+
+ # Setup data-type and data-layout files to include
+ cpu_operators = filelist['cpu']['operators'].keys()
+ cpu_ops_to_build = resolve_operator_dependencies(
+ filelist, cpu_operators, 'cpu')
+ cpu_files = get_operator_backend_files(
+ filelist, cpu_ops_to_build, 'cpu', simd, attrs)
+
+ # Shared among ALL CPU files
+ lib_files += cpu_files.get('common', [])
+
+ # Arm® Neon™ specific files
+ lib_files += cpu_files.get('neon', [])
+
+ # SVE files only
+ lib_files_sve = cpu_files.get('sve', [])
+
+ # SVE2 files only
+ lib_files_sve2 = cpu_files.get('sve2', [])
+
+ graph_files += glob.glob('src/graph/backends/NEON/*.cpp')
+
+ # -------------------------------------
+
+ graph_files = sorted([path.replace("src/", "") for path in graph_files])
+ lib_files_sve = sorted([path.replace("src/", "") for path in lib_files_sve])
+ lib_files_sve2 = sorted([path.replace("src/", "") for path in lib_files_sve2])
+ lib_files = sorted([path.replace("src/", "") for path in lib_files])
+
+ return graph_files, lib_files_sve, lib_files_sve2, lib_files
+
+
+if "__main__" in __name__:
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--bazel", action="store_true")
+ parser.add_argument("--cmake", action="store_true")
+ args = parser.parse_args()
+
+ graph_files, lib_files_sve, lib_files_sve2, lib_files = gather_sources()
+
+ if args.bazel:
+ # 8562a4ec: Remove CommonGraphOptions from Utils target and warnings
+ graph_files += ["//utils:CommonGraphOptions.cpp"]
+
+ bazel_build_string = build_from_template_bazel(
+ graph_files, lib_files_sve, lib_files_sve2, lib_files)
+ with open("src/BUILD.bazel", "w") as fp:
+ fp.write(bazel_build_string)
+
+ if args.cmake:
+ cmake_build_string = build_from_template_cmake(
+ graph_files, lib_files_sve, lib_files_sve2, lib_files)
+ with open("src/CMakeLists.txt", "w") as fp:
+ fp.write(cmake_build_string)
+
+ if not args.cmake and not args.bazel:
+ print("Supply either --bazel or --cmake flag to generate build files for corresponding build")
diff --git a/scripts/generate_documentation.sh b/scripts/generate_documentation.sh
new file mode 100644
index 0000000000..0a4097517e
--- /dev/null
+++ b/scripts/generate_documentation.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+#
+# Copyright (c) 2024 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+# Generates the Doxygen documentation pages for Compute Library.
+# The script has to be run from the ComputeLibrary root directory.
+
+set -e
+set -u
+set -o pipefail
+
+doxygen docs/Doxyfile 2>&1 | awk '/DOXY_WARN/{ print $0; err=1 } END{ exit err }'
diff --git a/scripts/include_functions_kernels.py b/scripts/include_functions_kernels.py
index 82b40f0e36..49c12867e6 100644
--- a/scripts/include_functions_kernels.py
+++ b/scripts/include_functions_kernels.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2017-2018, 2020-2021 Arm Limited.
+# Copyright (c) 2017-2018, 2020-2021, 2023 Arm Limited.
#
# SPDX-License-Identifier: MIT
#
@@ -74,7 +74,7 @@ def create_include_list(folder):
files_path = folder + "/*.h"
files = glob.glob(files_path)
updated_files = [include_str + folder + "/" + x.rsplit('/',1)[1] + "\"\n" for x in files]
- updated_files.sort()
+ updated_files.sort(key=lambda x: x.lower())
return updated_files
@@ -86,7 +86,7 @@ def include_components(target, path, header_prefix, folder, subfolders=None):
include_list = create_include_list(target_path + folder)
for s in subfolders or []:
include_list += create_include_list( target_path + folder + "/" + s)
- include_list.sort()
+ include_list.sort(key=lambda x: x.lower())
lines = read_file(components_file)
lines, first_pos = remove_existing_includes(lines)
lines = add_updated_includes(lines, first_pos, include_list)
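
The key=lambda x: x.lower() change above makes the include lists sort alphabetically rather than by ASCII code, where every uppercase name would sort before any lowercase one:

    names = ["Zebra.h", "apple.h"]
    print(sorted(names))                           # ['Zebra.h', 'apple.h']
    print(sorted(names, key=lambda x: x.lower()))  # ['apple.h', 'Zebra.h']
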
diff --git a/scripts/modules/Shell.py b/scripts/modules/Shell.py
new file mode 100755
index 0000000000..f3fd0bd242
--- /dev/null
+++ b/scripts/modules/Shell.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2023 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+import os
+import logging
+import subprocess
+
+logger = logging.getLogger("Shell")
+
+class Shell:
+ def __init__(self, is_interactive=False):
+ self.line=""
+ self.env=os.environ.copy()
+ self.initial_path = self.env["PATH"]
+ self.save_cwd()
+ self.is_interactive = is_interactive
+
+ def reset_path(self):
+ self.env["PATH"]=self.initial_path
+
+ def set_env(self, key, value):
+ self.env[key] = value
+
+ def append_env(self, key, value):
+ logger.debug("Appending '%s' to '%s'" % (value, key))
+ if key not in list(self.env.keys()):
+ self.set_env(key,value)
+ else:
+ self.env[key] += ":"+value
+ def prepend_env(self, key, value):
+ logger.debug("Prepending '%s' to '%s'" % (value, key))
+ if key not in list(self.env.keys()):
+ self.set_env(key,value)
+ else:
+ self.env[key] = value+":"+self.env[key]
+ def run(self, cmd):
+ if isinstance(cmd, list):
+ for c in cmd:
+ self.run_single(c)
+ else:
+ self.run_single(cmd)
+ def run_to_str(self, cmd):
+ out = ""
+ if isinstance(cmd, list):
+ for c in cmd:
+ out += self.run_single_to_str(c)
+ else:
+ out = self.run_single_to_str(cmd)
+ return out
+ def cd(self, dirname):
+ os.chdir(dirname)
+
+ def save_cwd(self):
+ self.cwd = os.getcwd()
+
+ def restore_cwd(self):
+ self.cd( self.cwd )
+
+ def run_single_interactive(self,cmd):
+ subprocess.check_call(cmd, env=self.env,stderr=subprocess.STDOUT, shell=True)
+ logger.debug("%s returned" % cmd)
+
+ def run_single(self,cmd):
+ if self.is_interactive:
+ self.run_single_interactive(cmd)
+ else:
+ self.run_single_to_str(cmd)
+
+ def run_single_to_str_no_output_check(self,cmd):
+ try:
+ out = subprocess.check_output(cmd, env=self.env, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as cpe:
+ out = cpe.output
+ if (len(out.strip()) > 0):
+ logger.debug(out)
+ logger.debug("%s returned" % cmd)
+ return out
+
+ def run_single_to_str(self,cmd):
+ out = subprocess.check_output(cmd, env=self.env, stderr=subprocess.STDOUT, shell=True).decode('utf-8')
+ if (len(out.strip()) > 0):
+ logger.debug(out)
+ logger.debug("%s returned" % cmd)
+ return out
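
Hypothetical use of the Shell helper above (the commands shown are examples only):

    from modules.Shell import Shell

    sh = Shell()
    sh.save_cwd()
    sh.cd("scripts")
    sh.prepend_env("PATH", "../bin")
    out = sh.run_single_to_str("git status --short")  # raises CalledProcessError on failure
    sh.restore_cwd()
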
diff --git a/scripts/print_version_file.py b/scripts/print_version_file.py
new file mode 100644
index 0000000000..a7654ff997
--- /dev/null
+++ b/scripts/print_version_file.py
@@ -0,0 +1,55 @@
+# Copyright (c) 2023 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+import sys
+import os
+
+VERSION = "v0.0-unreleased"
+
+
+def make_version_file(build_args, git_hash):
+ build_info = "\"arm_compute_version=%s Build options: %s Git hash=%s\"" % (
+ VERSION, build_args, git_hash.strip())
+ return build_info
+
+def make_version_file_from_sconscript(build_args, git_hash):
+ # Prefer the VERSION defined in a SConscript, if one can be found; otherwise
+ # fall back to the default below.
+ VERSION = "v0.0-unreleased"
+ fp = None
+ if os.path.exists("external/compute_library/SConscript"):
+ fp = "external/compute_library/SConscript"
+ elif os.path.exists("SConscript"):
+ fp = "SConscript"
+ if fp:
+ with open(fp) as scons_file:
+ for line in scons_file:
+ if "VERSION = " in line:
+ VERSION = line.split("=")[-1].strip().replace("\"", "")
+ break
+ return "\"arm_compute_version=%s Build options: %s Git hash=%s\"" % (
+ VERSION, build_args, git_hash.strip())
+
+if __name__ == "__main__":
+ if len(sys.argv) == 4 and sys.argv[3].lower() == "true":
+ print(make_version_file_from_sconscript(sys.argv[1], sys.argv[2]))
+ else:
+ print(make_version_file(sys.argv[1], sys.argv[2]))
+
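+# Example invocation (illustrative):
+#   python print_version_file.py "<build options>" "<git hash>" [true|false]
+# The optional third argument, when "true", makes the script read VERSION from a
+# SConscript if one is found; otherwise the hard-coded default above is used.
+# The printed string looks like:
+#   "arm_compute_version=v0.0-unreleased Build options: <build options> Git hash=<git hash>"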
diff --git a/scripts/tensorflow_data_extractor.py b/scripts/tensorflow_data_extractor.py
deleted file mode 100755
index 1dbf0e127e..0000000000
--- a/scripts/tensorflow_data_extractor.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-"""Extracts trainable parameters from Tensorflow models and stores them in numpy arrays.
-Usage
- python tensorflow_data_extractor -m path_to_binary_checkpoint_file -n path_to_metagraph_file
-
-Saves each variable to a {variable_name}.npy binary file.
-
-Note that since Tensorflow version 0.11 the binary checkpoint file which contains the values for each parameter has the format of:
- {model_name}.data-{step}-of-{max_step}
-instead of:
- {model_name}.ckpt
-When dealing with binary files with version >= 0.11, only pass {model_name} to -m option;
-when dealing with binary files with version < 0.11, pass the whole file name {model_name}.ckpt to -m option.
-
-Also note that this script relies on the parameters to be extracted being in the
-'trainable_variables' tensor collection. By default all variables are automatically added to this collection unless
-specified otherwise by the user. Thus should a user alter this default behavior and/or want to extract parameters from other
-collections, tf.GraphKeys.TRAINABLE_VARIABLES should be replaced accordingly.
-
-Tested with Tensorflow 1.2, 1.3 on Python 2.7.6 and Python 3.4.3.
-"""
-import argparse
-import numpy as np
-import os
-import tensorflow as tf
-
-
-if __name__ == "__main__":
- # Parse arguments
- parser = argparse.ArgumentParser('Extract Tensorflow net parameters')
- parser.add_argument('-m', dest='modelFile', type=str, required=True, help='Path to Tensorflow checkpoint binary\
- file. For Tensorflow version >= 0.11, only include model name; for Tensorflow version < 0.11, include\
- model name with ".ckpt" extension')
- parser.add_argument('-n', dest='netFile', type=str, required=True, help='Path to Tensorflow MetaGraph file')
- args = parser.parse_args()
-
- # Load Tensorflow Net
- saver = tf.train.import_meta_graph(args.netFile)
- with tf.Session() as sess:
- # Restore session
- saver.restore(sess, args.modelFile)
- print('Model restored.')
- # Save trainable variables to numpy arrays
- for t in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):
- varname = t.name
- if os.path.sep in t.name:
- varname = varname.replace(os.path.sep, '_')
- print("Renaming variable {0} to {1}".format(t.name, varname))
- print("Saving variable {0} with shape {1} ...".format(varname, t.shape))
- # Dump as binary
- np.save(varname, sess.run(t))
diff --git a/scripts/update_supported_ops.py b/scripts/update_supported_ops.py
new file mode 100644
index 0000000000..c39c8bc827
--- /dev/null
+++ b/scripts/update_supported_ops.py
@@ -0,0 +1,414 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2024 Arm Limited.
+#
+# SPDX-License-Identifier: MIT
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""
+Updates the Doxygen documentation pages with a table of operators supported by Compute Library.
+
+The script internally builds up the table in XML format. For each operator it records the supported compute backends,
+data types and data layouts, and the equivalent operator in the Android Neural Networks API. The list of operators is
+pulled from the OperatorList.h header file, and further implementation details are taken from the headers of the
+backend-specific functions, e.g. NEStridedSlice.h.
+
+Usage:
+ python update_supported_ops.py [--debug] [--dump_xml]
+"""
+
+import argparse
+import logging
+import re
+from enum import Enum
+from pathlib import Path
+
+
+class States(Enum):
+ INIT = 0
+ DESCRIPTION = 1
+ DESCRIPTION_END = 2
+ IN_CLASS = 3
+ DATA_TYPE_START = 4
+ DATA_TYPE_END = 5
+ NN_OPERATOR = 6
+ NN_OPERATOR_END = 7
+ SKIP_OPERATOR = 8
+ DATA_LAYOUT_START = 9
+ DATA_LAYOUT_END = 10
+
+
+class OperatorsTable:
+ def __init__(self):
+ self.project_dir = Path(__file__).resolve().parents[1] # ComputeLibrary directory
+ self.xml = ""
+
+ def generate_operator_list(self):
+ operator_list_head_file = self.project_dir / "arm_compute" / "runtime" / "OperatorList.h"
+ neon_file_name_prefix = str(self.project_dir / "arm_compute" / "runtime" / "NEON" / "functions" / "NE")
+ cl_file_name_prefix = str(self.project_dir / "arm_compute" / "runtime" / "CL" / "functions" / "CL")
+
+ logging.debug(operator_list_head_file)
+
+ f = open(operator_list_head_file, 'r')
+ # Iterates over the lines of the file
+ state = States.INIT
+ operator_desc = ""
+ nn_op_list = []
+ for line in f:
+ # /** ActivationLayer
+ # *
+ # * Description:
+ # * Function to simulate an activation layer with the specified activation function.
+ # *
+ # * Equivalent Android NNAPI Op:
+ # * ANEURALNETWORKS_ELU
+ # * ANEURALNETWORKS_HARD_SWISH
+ # * ANEURALNETWORKS_LOGISTIC
+ # * ANEURALNETWORKS_RELU
+ # * ANEURALNETWORKS_RELU1
+ # * ANEURALNETWORKS_RELU6
+ # * ANEURALNETWORKS_TANH
+ # *
+ # */
+ # Check for "/**" of the start of the operator
+ r = re.search('^\s*/\*\*(.*)', line)
+ if r and state == States.INIT:
+ # Skip below ones
+ if re.search('.*\(not ported\)', line):
+ state = States.SKIP_OPERATOR
+ continue
+ if re.search('.*\(only CL\)', line):
+ state = States.SKIP_OPERATOR
+ continue
+ if re.search('.*\(no CL\)', line):
+ state = States.SKIP_OPERATOR
+ continue
+ if re.search('.*\(skip\)', line):
+ state = States.SKIP_OPERATOR
+ continue
+ # Check" */"
+ r = re.match('\s*\*/\s*$', line)
+ if r and state == States.SKIP_OPERATOR:
+ state = States.INIT
+ continue
+ # Check " *"
+ r = re.match('\s*\*\s*$', line)
+ if r and state == States.SKIP_OPERATOR:
+ continue
+ # Check non " *" lines
+ r = re.search('^\s*\*(.*)', line)
+ if r and state == States.SKIP_OPERATOR:
+ continue
+
+ # Check for "/**" of the start of the operator
+ r = re.search('^\s*/\*\*(.*)', line)
+ if r and state == States.INIT:
+ tmp = r.groups()[0]
+ class_name = tmp.strip()
+ logging.debug(class_name)
+ continue
+
+ # Check whether "Description: " exists
+ r = re.search('\s*\*\s*Description:\s*', line)
+ if r and state == States.INIT:
+ state = States.DESCRIPTION
+ continue
+ # The description ends at a line containing only " *"
+ r = re.match('\s*\*\s*$', line)
+ if r and state == States.DESCRIPTION:
+ logging.debug(operator_desc)
+ state = States.DESCRIPTION_END
+ continue
+ # Find continuing class description in the following lines
+ r = re.search('^\s*\*(.*)', line)
+ if r and state == States.DESCRIPTION:
+ tmp = r.groups()[0]
+ operator_desc = operator_desc + ' ' + tmp.strip()
+ continue
+
+ # Check whether "Equivalent AndroidNN Op: " exists
+ r = re.search('\s*\*\s*Equivalent Android NNAPI Op:\s*', line)
+ if r and state == States.DESCRIPTION_END:
+ state = States.NN_OPERATOR
+ continue
+ # The Android NNAPI Op list ends at a line containing only " *"
+ r = re.match('\s*\*\s*$', line)
+ if r and state == States.NN_OPERATOR:
+ logging.debug(nn_op_list)
+ state = States.NN_OPERATOR_END
+ # Check NE#class_name
+ neon_file_name = neon_file_name_prefix + class_name + ".h"
+ logging.debug(neon_file_name)
+ # Check CL#class_name
+ cl_file_name = cl_file_name_prefix + class_name + ".h"
+ logging.debug(cl_file_name)
+ # Check whether CL/Neon file exists
+ if Path(neon_file_name).is_file() and Path(cl_file_name).is_file():
+ if neon_file_name.find("NEElementwiseOperations.h") != -1:
+ logging.debug(neon_file_name)
+ self.generate_operator_common_info(class_name, operator_desc, nn_op_list, "13")
+ elif neon_file_name.find("NEElementwiseUnaryLayer.h") != -1:
+ logging.debug(neon_file_name)
+ self.generate_operator_common_info(class_name, operator_desc, nn_op_list, "8")
+ else:
+ self.generate_operator_common_info(class_name, operator_desc, nn_op_list, "2")
+ self.generate_operator_info(neon_file_name)
+ self.generate_operator_cl_begin()
+ self.generate_operator_info(cl_file_name)
+ else:
+ if neon_file_name.find("NELogical.h") != -1:
+ logging.debug(neon_file_name)
+ self.generate_operator_common_info(class_name, operator_desc, nn_op_list, "3")
+ else:
+ self.generate_operator_common_info(class_name, operator_desc, nn_op_list, "1")
+ if Path(neon_file_name).is_file():
+ self.generate_operator_info(neon_file_name)
+ if Path(cl_file_name).is_file():
+ self.generate_operator_info(cl_file_name)
+ continue
+
+ # Collect the Android NNAPI Ops listed on the following lines
+ r = re.search('^\s*\*(.*)', line)
+ if r and state == States.NN_OPERATOR:
+ tmp = r.groups()[0]
+ nn_op = tmp.strip()
+ nn_op_list.append(nn_op)
+ continue
+
+ # The operator comment block ends at a line containing only " */"
+ r = re.match('\s*\*/\s*$', line)
+ if r and state == States.NN_OPERATOR_END:
+ operator_desc = ""
+ nn_op_list = []
+ state = States.INIT
+ continue
+ f.close()
+
+ def generate_operator_info(self, file_name):
+ logging.debug(file_name)
+ f = open(file_name, 'r')
+ # Iterate over the lines of the file
+ state = States.INIT
+ data_type_list = []
+ data_layout_list = []
+ io_list = []
+ class_no = 0
+ for line in f:
+ # Locate the class definition by "class ... : public IFunction".
+ # There are exceptions, which will need to be supported in a later version.
+ r = re.match("\s*class\s+(\S+)\s*:\s*(public)*", line)
+ if r and state == States.INIT:
+ class_name = r.groups()[0]
+ logging.debug("class name is %s" % (class_name))
+ state = States.IN_CLASS
+ continue
+
+ r = re.match("\s*\}\;", line)
+ if r and state == States.IN_CLASS:
+ state = States.INIT
+ continue
+
+ # * Valid data layouts:
+ # * - All
+ r = re.search('\s*\*\s*Valid data layouts:', line)
+ if r and state == States.IN_CLASS:
+ state = States.DATA_LAYOUT_START
+ continue
+ # The data layout list ends at a line containing only " *"
+ r = re.match('\s*\*\s*$', line)
+ if r and state == States.DATA_LAYOUT_START:
+ state = States.DATA_LAYOUT_END
+ continue
+ # Data layout continues
+ r = re.search('\s*\*\s*\-\s*(.*)', line)
+ if r and state == States.DATA_LAYOUT_START:
+ tmp = r.groups()[0]
+ tmp = tmp.strip()
+ logging.debug(tmp)
+ data_layout_list.append(tmp)
+
+ # * Valid data type configurations:
+ # * |src0 |dst |
+ # * |:--------------|:--------------|
+ # * |QASYMM8 |QASYMM8 |
+ # * |QASYMM8_SIGNED |QASYMM8_SIGNED |
+ # * |QSYMM16 |QSYMM16 |
+ # * |F16 |F16 |
+ # * |F32 |F32 |
+ r = re.search('\s*\*\s*Valid data type configurations:\s*', line)
+ if r and state == States.DATA_LAYOUT_END:
+ state = States.DATA_TYPE_START
+ logging.debug(line)
+ continue
+ # The data type table ends at a line containing only " *"
+ r = re.match('\s*\*\s*$', line)
+ if r and state == States.DATA_TYPE_START:
+ logging.debug(class_name)
+ logging.debug(data_layout_list)
+ logging.debug(io_list)
+ logging.debug(data_type_list)
+ class_no = class_no + 1
+ if class_no > 1:
+ logging.debug(class_no)
+ self.generate_operator_cl_begin()
+ self.generate_operator_dl_dt_info(class_name, data_layout_list, io_list, data_type_list)
+ state = States.INIT
+ data_type_list = []
+ data_layout_list = []
+ continue
+ # Data type continues
+ r = re.search('\s*\*(.*)', line)
+ if r and state == States.DATA_TYPE_START:
+ tmp = r.groups()[0]
+ tmp = tmp.strip()
+ if re.search('\|\:\-\-\-', tmp):
+ # Skip the table separator row "|:-----"
+ continue
+ else:
+ tmp = tmp.strip()
+ if re.search('.*(src|input|dst)', tmp):
+ io_list = tmp.split('|')
+ else:
+ data_type = tmp.split('|')
+ logging.debug(data_type)
+ data_type_list.append(data_type)
+ continue
+
+ f.close()
+
+ def generate_operator_cl_begin(self):
+ self.xml += "<tr>\n"
+
+ def generate_operator_common_info(self, class_name, operator_desc, nn_op_list, rowspan):
+ tmp = "<tr>\n"
+ # Store class name
+ tmp += " <td rowspan=\"" + rowspan + "\">" + class_name + "\n"
+ tmp += " <td rowspan=\"" + rowspan + "\" style=\"width:200px;\">" + operator_desc + "\n"
+ tmp += " <td rowspan=\"" + rowspan + "\">\n"
+ tmp += " <ul>\n"
+ for item in nn_op_list:
+ tmp += " <li>"
+ tmp += item.strip()
+ tmp += "\n"
+ tmp += " </ul>\n"
+ self.xml += tmp
+
+ def generate_operator_dl_dt_info(self, class_name, data_layout, io_list, data_type_list):
+ tmp = " <td>" + class_name + "\n"
+ # Store data layout info
+ tmp += " <td>\n"
+ tmp += " <ul>\n"
+ for item in data_layout:
+ tmp += " <li>"
+ tmp += item.strip()
+ tmp += "\n"
+ tmp += " </ul>\n"
+ tmp += " <td>\n"
+ # Store data type table
+ tmp += " <table>\n"
+ tmp += " <tr>"
+ for io in io_list:
+ # Skip empty strings produced by the leading/trailing '|'
+ if len(io) != 0:
+ tmp += "<th>"
+ tmp += io.strip()
+ tmp += "\n"
+ for item in data_type_list:
+ tmp += " <tr>"
+ for i in item:
+ # Skip empty strings produced by the leading/trailing '|'
+ if len(i) != 0:
+ tmp += "<td>"
+ tmp += i.strip()
+ tmp += "\n"
+ tmp += " </table>\n"
+ self.xml += tmp
+
+ def generate_table_prefix(self):
+ tmp = "<table>\n"
+ tmp += "<caption id=\"multi_row\"></caption>\n"
+ tmp += "<tr>\n"
+ tmp += " <th>Function\n"
+ tmp += " <th>Description\n"
+ tmp += " <th>Equivalent Android NNAPI Op\n"
+ tmp += " <th>Backends\n"
+ tmp += " <th>Data Layouts\n"
+ tmp += " <th>Data Types\n"
+ self.xml += tmp
+
+ def generate_table_ending(self):
+ self.xml += "</table>\n"
+
+ def dump_xml(self):
+ print(self.xml)
+
+ def update_dox_file(self):
+ operator_list_dox = self.project_dir / "docs" / "user_guide" / "operator_list.dox"
+
+ with open(operator_list_dox, "r") as f:
+ dox_content = f.read()
+
+ # Check that there is only one non-indented table (This table should be the operator list)
+ x = re.findall("\n<table>", dox_content)
+ y = re.findall("\n</table>", dox_content)
+ if len(x) != 1 or len(y) != 1:
+ raise RuntimeError("Invalid .dox file")
+
+ repl_str = "\n" + self.xml[:-1] # Extra / removed "\n" characters needed to make up for search regex
+ new_file = re.sub("\n<table>(.|\n)*\n<\/table>", repl_str, dox_content)
+
+ with open(operator_list_dox, "w") as f:
+ f.write(new_file)
+ print("Successfully updated operator_list.dox with the XML table of supported operators.")
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Updates the Compute Library documentation with a table of supported operators."
+ )
+ parser.add_argument(
+ "--dump_xml",
+ action="store_true",
+ help="Dump the supported operators table XML to stdout",
+ )
+ parser.add_argument(
+ "--debug",
+ action="store_true",
+ help="Enables logging, helpful for debugging. Default: False",
+ )
+ args = parser.parse_args()
+
+ if args.debug:
+ logging.basicConfig(format="%(message)s", level=logging.DEBUG)
+
+ table_xml = OperatorsTable()
+ table_xml.generate_table_prefix()
+ table_xml.generate_operator_list()
+ table_xml.generate_table_ending()
+ table_xml.update_dox_file()
+
+ if args.dump_xml:
+ table_xml.dump_xml()