From e5a0bc3ecd4d9c46ead3b8217584eaa916a3afa4 Mon Sep 17 00:00:00 2001
From: Benjamin Klimczak
Date: Thu, 24 Aug 2023 16:38:47 +0100
Subject: MLIA-961 Update tox dependencies

- Update version dependencies in the tox.ini
- Fix linter issues

Change-Id: I04c3a841ee2646a865dab037701d66c28792f2a4
Signed-off-by: Benjamin Klimczak
---
 src/mlia/backend/corstone/install.py           |  2 +-
 src/mlia/backend/install.py                    |  8 ++++----
 src/mlia/backend/repo.py                       | 12 ++++++------
 src/mlia/backend/vela/compiler.py              |  8 +++++---
 src/mlia/backend/vela/performance.py           |  4 +++-
 src/mlia/cli/commands.py                       |  2 +-
 src/mlia/core/advisor.py                       |  4 ++--
 src/mlia/core/common.py                        |  2 +-
 src/mlia/core/events.py                        |  6 +++---
 src/mlia/core/mixins.py                        | 12 ++++++------
 src/mlia/core/reporting.py                     |  2 +-
 src/mlia/nn/tensorflow/config.py               |  6 +++---
 src/mlia/nn/tensorflow/optimizations/select.py |  4 ++--
 src/mlia/nn/tensorflow/utils.py                |  6 +++---
 src/mlia/target/cortex_a/advisor.py            |  4 ++--
 src/mlia/target/cortex_a/operators.py          |  2 +-
 src/mlia/target/cortex_a/reporters.py          |  2 +-
 src/mlia/target/ethos_u/advisor.py             |  6 +++---
 src/mlia/target/ethos_u/data_collection.py     |  2 +-
 src/mlia/target/ethos_u/reporters.py           |  2 +-
 src/mlia/target/tosa/advisor.py                |  6 ++++--
 src/mlia/target/tosa/operators.py              |  4 ++--
 src/mlia/target/tosa/reporters.py              |  2 +-
 src/mlia/utils/console.py                      |  2 +-
 src/mlia/utils/logging.py                      |  2 +-
 25 files changed, 59 insertions(+), 53 deletions(-)

diff --git a/src/mlia/backend/corstone/install.py b/src/mlia/backend/corstone/install.py
index 35976cf..d6101cf 100644
--- a/src/mlia/backend/corstone/install.py
+++ b/src/mlia/backend/corstone/install.py
@@ -51,7 +51,7 @@ class Corstone300Installer:
                 # this instance
                 subprocess.check_call(fvp_install_cmd)  # nosec
             except subprocess.CalledProcessError as err:
-                raise Exception(
+                raise RuntimeError(
                     "Error occurred during Corstone-300 installation"
                 ) from err
diff --git a/src/mlia/backend/install.py b/src/mlia/backend/install.py
index 4745f19..721b660 100644
--- a/src/mlia/backend/install.py
+++ b/src/mlia/backend/install.py
@@ -145,7 +145,7 @@ class BackendInstallation(Installation):
             assert backend_info is not None, "Unable to resolve backend path"
             self._install_from(backend_info)
         else:
-            raise Exception(f"Unable to install {install_type}")
+            raise RuntimeError(f"Unable to install {install_type}.")

     def _install_from(self, backend_info: BackendInfo) -> None:
         """Install backend from the directory."""
@@ -173,7 +173,7 @@ class BackendInstallation(Installation):
         try:
             downloaded_to = download_artifact.download_to(tmpdir)
         except Exception as err:
-            raise Exception("Unable to download backend artifact") from err
+            raise RuntimeError("Unable to download backend artifact.") from err

         with working_directory(tmpdir / "dist", create_dir=True) as dist_dir:
             with tarfile.open(downloaded_to) as archive:
@@ -184,7 +184,7 @@ class BackendInstallation(Installation):
             backend_path = self.backend_installer(eula_agrement, dist_dir)

         if self.path_checker(backend_path) is None:
-            raise Exception("Downloaded artifact has invalid structure")
+            raise ValueError("Downloaded artifact has invalid structure.")

         self.install(InstallFromPath(backend_path))
@@ -311,7 +311,7 @@ class PyPackageBackendInstallation(Installation):
     def install(self, install_type: InstallationType) -> None:
         """Install the backend."""
         if not self.supports(install_type):
-            raise Exception(f"Unsupported installation type {install_type}")
+            raise ValueError(f"Unsupported installation type {install_type}.")
         self.package_manager.install(self._packages_to_install)
diff --git a/src/mlia/backend/repo.py b/src/mlia/backend/repo.py
index 3dd2e57..b64a46a 100644
--- a/src/mlia/backend/repo.py
+++ b/src/mlia/backend/repo.py
@@ -109,7 +109,7 @@ class BackendRepository:
         repo_backend_path = self._get_backend_path(backend_dir_name)

         if repo_backend_path.exists():
-            raise Exception(f"Unable to copy backend files for {backend_name}.")
+            raise RuntimeError(f"Unable to copy backend files for {backend_name}.")

         copy_all(backend_path, dest=repo_backend_path)
@@ -126,7 +126,7 @@ class BackendRepository:
     ) -> None:
         """Add backend to repository."""
         if self.is_backend_installed(backend_name):
-            raise Exception(f"Backend {backend_name} already installed.")
+            raise RuntimeError(f"Backend {backend_name} already installed.")

         settings = settings or {}
         settings["backend_path"] = backend_path.absolute().as_posix()
@@ -138,7 +138,7 @@ class BackendRepository:
         settings = self.config_file.get_backend_settings(backend_name)

         if not settings:
-            raise Exception(f"Backend {backend_name} is not installed.")
+            raise RuntimeError(f"Backend {backend_name} is not installed.")

         if "backend_dir" in settings:
             repo_backend_path = self._get_backend_path(settings["backend_dir"])
@@ -155,7 +155,7 @@ class BackendRepository:
         settings = self.config_file.get_backend_settings(backend_name)

         if not settings:
-            raise Exception(f"Backend {backend_name} is not installed.")
+            raise RuntimeError(f"Backend {backend_name} is not installed.")

         if backend_dir := settings.get("backend_dir", None):
             return self._get_backend_path(backend_dir), settings
@@ -163,7 +163,7 @@ class BackendRepository:
         if backend_path := settings.get("backend_path", None):
             return Path(backend_path), settings

-        raise Exception(f"Unable to resolve path of the backend {backend_name}.")
+        raise RuntimeError(f"Unable to resolve path of the backend {backend_name}.")

     def _get_backend_path(self, backend_dir_name: str) -> Path:
         """Return path to backend."""
@@ -173,7 +173,7 @@ class BackendRepository:
         """Init repository."""
         if self.repository.exists():
             if not self.config_file.exists():
-                raise Exception(
+                raise RuntimeError(
                     f"Directory {self.repository} could not be used as MLIA repository."
                 )
         else:
diff --git a/src/mlia/backend/vela/compiler.py b/src/mlia/backend/vela/compiler.py
index afad05b..78f97b2 100644
--- a/src/mlia/backend/vela/compiler.py
+++ b/src/mlia/backend/vela/compiler.py
@@ -129,7 +129,7 @@ class VelaCompiler:  # pylint: disable=too-many-instance-attributes
         nng, network_type = model.nng, NetworkType.TFLite

         if not nng:
-            raise Exception("Unable to read model")
+            raise ValueError("Unable to read model: model.nng is not available")

         output_basename = f"{self.output_dir}/{nng.name}"
@@ -152,7 +152,9 @@ class VelaCompiler:  # pylint: disable=too-many-instance-attributes
             return OptimizedModel(nng, arch, compiler_options, scheduler_options)
         except (SystemExit, Exception) as err:
-            raise Exception("Model could not be optimized with Vela compiler") from err
+            raise RuntimeError(
+                "Model could not be optimized with Vela compiler."
+            ) from err

     def get_config(self) -> dict[str, Any]:
         """Get compiler configuration."""
@@ -200,7 +202,7 @@ class VelaCompiler:  # pylint: disable=too-many-instance-attributes
             ):
                 return read_model(model_path, ModelReaderOptions())  # type: ignore
         except (SystemExit, Exception) as err:
-            raise Exception(f"Unable to read model {model_path}") from err
+            raise RuntimeError(f"Unable to read model {model_path}.") from err

     def _architecture_features(self) -> ArchitectureFeatures:
         """Return ArchitectureFeatures instance."""
diff --git a/src/mlia/backend/vela/performance.py b/src/mlia/backend/vela/performance.py
index e545b85..a548b26 100644
--- a/src/mlia/backend/vela/performance.py
+++ b/src/mlia/backend/vela/performance.py
@@ -56,7 +56,9 @@ def estimate_performance(
     initial_model = vela_compiler.read_model(model_path)

     if initial_model.optimized:
-        raise Exception("Unable to estimate performance for the given optimized model")
+        raise ValueError(
+            "Unable to estimate performance for the given optimized model."
+        )

     optimized_model = vela_compiler.compile_model(initial_model)
diff --git a/src/mlia/cli/commands.py b/src/mlia/cli/commands.py
index f0ba519..1f339ee 100644
--- a/src/mlia/cli/commands.py
+++ b/src/mlia/cli/commands.py
@@ -72,7 +72,7 @@ def check(
             "model.h5", compatibility=True, performance=True)
     """
     if not model:
-        raise Exception("Model is not provided")
+        raise ValueError("Model is not provided.")

     # Set category based on checks to perform (i.e. "compatibility" and/or
     # "performance").
diff --git a/src/mlia/core/advisor.py b/src/mlia/core/advisor.py
index d684241..7db5cfb 100644
--- a/src/mlia/core/advisor.py
+++ b/src/mlia/core/advisor.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
 # SPDX-License-Identifier: Apache-2.0
 """Inference advisor module."""
 from __future__ import annotations
@@ -77,7 +77,7 @@ class DefaultInferenceAdvisor(InferenceAdvisor, ParameterResolverMixin):
         model = Path(model_param)

         if not model.exists():
-            raise Exception(f"Path {model} does not exist")
+            raise FileNotFoundError(f"Path {model} does not exist.")

         return model
diff --git a/src/mlia/core/common.py b/src/mlia/core/common.py
index baaed50..e437a75 100644
--- a/src/mlia/core/common.py
+++ b/src/mlia/core/common.py
@@ -36,7 +36,7 @@ class AdviceCategory(Flag):
         category_names = [item.name for item in AdviceCategory]
         for advice_value in values:
             if advice_value.upper() not in category_names:
-                raise Exception(f"Invalid advice category {advice_value}")
+                raise ValueError(f"Invalid advice category {advice_value}.")
         return {AdviceCategory[value.upper()] for value in values}
diff --git a/src/mlia/core/events.py b/src/mlia/core/events.py
index e328cc1..ae22771 100644
--- a/src/mlia/core/events.py
+++ b/src/mlia/core/events.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
 # SPDX-License-Identifier: Apache-2.0
 """Module for the events and related functionality.
@@ -267,14 +267,14 @@ class EventDispatcherMetaclass(type):
     """

     def __new__(
-        cls,
+        mcs,
         clsname: str,
         bases: tuple[type, ...],
         namespace: dict[str, Any],
         event_handler_method_prefix: str = "on_",
     ) -> Any:
         """Create event dispatcher and link event handlers."""
-        new_class = super().__new__(cls, clsname, bases, namespace)
+        new_class = super().__new__(mcs, clsname, bases, namespace)

         @singledispatchmethod
         def dispatcher(_self: Any, _event: Event) -> Any:
diff --git a/src/mlia/core/mixins.py b/src/mlia/core/mixins.py
index 5ef9d66..e50e6f7 100644
--- a/src/mlia/core/mixins.py
+++ b/src/mlia/core/mixins.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
 # SPDX-License-Identifier: Apache-2.0
 """Mixins module."""
 from __future__ import annotations
@@ -35,21 +35,21 @@ class ParameterResolverMixin:
         ctx = context or self.context

         if ctx.config_parameters is None:
-            raise Exception("Configuration parameters are not set")
+            raise ValueError("Configuration parameters are not set.")

         section_params = ctx.config_parameters.get(section)
         if section_params is None or not isinstance(section_params, dict):
-            raise Exception(
+            raise ValueError(
                 f"Parameter section {section} has wrong format, "
-                "expected to be a dictionary"
+                "expected to be a dictionary."
             )

         value = section_params.get(name)
         if not value and expected:
-            raise Exception(f"Parameter {name} is not set")
+            raise ValueError(f"Parameter {name} is not set.")

         if value and expected_type is not None and not isinstance(value, expected_type):
-            raise Exception(f"Parameter {name} expected to have type {expected_type}")
+            raise TypeError(f"Parameter {name} expected to have type {expected_type}.")

         return value
diff --git a/src/mlia/core/reporting.py b/src/mlia/core/reporting.py
index 7b9ce5c..722adfd 100644
--- a/src/mlia/core/reporting.py
+++ b/src/mlia/core/reporting.py
@@ -427,7 +427,7 @@ class SingleRow(Table):
     def to_plain_text(self, **kwargs: Any) -> str:
         """Produce report in human readable format."""
         if len(self.rows) != 1:
-            raise Exception("Table should have only one row")
+            raise RuntimeError(f"Table should have only one row, but has {self.rows}.")

         items = "\n".join(
             column.header.ljust(35) + str(item).rjust(25)
diff --git a/src/mlia/nn/tensorflow/config.py b/src/mlia/nn/tensorflow/config.py
index 0c3133a..d7d430f 100644
--- a/src/mlia/nn/tensorflow/config.py
+++ b/src/mlia/nn/tensorflow/config.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
 # SPDX-License-Identifier: Apache-2.0
 """Model configuration."""
 from __future__ import annotations
@@ -110,8 +110,8 @@ def get_model(model: str | Path) -> ModelConfiguration:
     if is_saved_model(model):
         return TfModel(model)

-    raise Exception(
-        "The input model format is not supported"
+    raise ValueError(
+        "The input model format is not supported "
         "(supported formats: TensorFlow Lite, Keras, TensorFlow saved model)!"
     )
diff --git a/src/mlia/nn/tensorflow/optimizations/select.py b/src/mlia/nn/tensorflow/optimizations/select.py
index d4a8ea4..a78df12 100644
--- a/src/mlia/nn/tensorflow/optimizations/select.py
+++ b/src/mlia/nn/tensorflow/optimizations/select.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
 # SPDX-License-Identifier: Apache-2.0
 """Module for optimization selection."""
 from __future__ import annotations
@@ -64,7 +64,7 @@ class OptimizationSettings(NamedTuple):
                 self.optimization_type, next_target, self.layers_to_optimize
             )

-        raise Exception(f"Unknown optimization type {self.optimization_type}")
+        raise ValueError(f"Optimization type {self.optimization_type} is unknown.")


 class MultiStageOptimizer(Optimizer):
diff --git a/src/mlia/nn/tensorflow/utils.py b/src/mlia/nn/tensorflow/utils.py
index 287e6ff..d688a63 100644
--- a/src/mlia/nn/tensorflow/utils.py
+++ b/src/mlia/nn/tensorflow/utils.py
@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
 # SPDX-FileCopyrightText: Copyright The TensorFlow Authors. All Rights Reserved.
 # SPDX-License-Identifier: Apache-2.0
 """Collection of useful functions for optimizations."""
@@ -22,7 +22,7 @@ def representative_dataset(
 ) -> Callable:
     """Sample dataset used for quantization."""
     if input_shape[0] != 1:
-        raise Exception("Only the input batch_size=1 is supported!")
+        raise ValueError("Only the input batch_size=1 is supported!")

     def dataset() -> Iterable:
         for _ in range(sample_count):
@@ -41,7 +41,7 @@ def get_tf_tensor_shape(model: str) -> list:
         default_signature = loaded.signatures[default_signature_key]
         inputs_tensor_info = default_signature.inputs
     except KeyError as err:
-        raise Exception(f"Signature '{default_signature_key}' not found") from err
+        raise KeyError(f"Signature '{default_signature_key}' not found.") from err

     return [
         dim
diff --git a/src/mlia/target/cortex_a/advisor.py b/src/mlia/target/cortex_a/advisor.py
index 3c127ec..db07b96 100644
--- a/src/mlia/target/cortex_a/advisor.py
+++ b/src/mlia/target/cortex_a/advisor.py
@@ -45,12 +45,12 @@ class CortexAInferenceAdvisor(DefaultInferenceAdvisor):
             collectors.append(CortexAOperatorCompatibility(model, target_config))

         if context.category_enabled(AdviceCategory.PERFORMANCE):
-            raise Exception(
+            raise RuntimeError(
                 "Performance estimation is currently not supported for Cortex-A."
             )

         if context.category_enabled(AdviceCategory.OPTIMIZATION):
-            raise Exception(
+            raise RuntimeError(
                 "Model optimizations are currently not supported for Cortex-A."
             )
diff --git a/src/mlia/target/cortex_a/operators.py b/src/mlia/target/cortex_a/operators.py
index cd92f31..4a8d992 100644
--- a/src/mlia/target/cortex_a/operators.py
+++ b/src/mlia/target/cortex_a/operators.py
@@ -143,7 +143,7 @@ def get_cortex_a_compatibility_info(

 def report() -> None:
     """Generate supported operators report."""
-    raise Exception(
+    raise NotImplementedError(
         "Generating a supported operators report is not "
         "currently supported with Cortex-A target profile."
     )
diff --git a/src/mlia/target/cortex_a/reporters.py b/src/mlia/target/cortex_a/reporters.py
index e7247f5..7f4f21b 100644
--- a/src/mlia/target/cortex_a/reporters.py
+++ b/src/mlia/target/cortex_a/reporters.py
@@ -87,4 +87,4 @@ def cortex_a_formatters(data: Any) -> Callable[[Any], Report]:
     if isinstance(data, CortexACompatibilityInfo):
         return report_cortex_a_operators

-    raise Exception(f"Unable to find appropriate formatter for {data}.")
+    raise RuntimeError(f"Unable to find appropriate formatter for {data}.")
diff --git a/src/mlia/target/ethos_u/advisor.py b/src/mlia/target/ethos_u/advisor.py
index 714d6a4..d2c308a 100644
--- a/src/mlia/target/ethos_u/advisor.py
+++ b/src/mlia/target/ethos_u/advisor.py
@@ -54,7 +54,7 @@ class EthosUInferenceAdvisor(DefaultInferenceAdvisor):
         if is_tflite_model(model):
             # TensorFlow Lite models do not support optimization (only performance)!
             if context.category_enabled(AdviceCategory.OPTIMIZATION):
-                raise Exception(
+                raise RuntimeError(
                     "Optimizations are not supported for TensorFlow Lite files."
                 )
             if context.category_enabled(AdviceCategory.PERFORMANCE):
@@ -170,7 +170,7 @@ def _get_config_parameters(
     backends = extra_args.get("backends")
     if backends is not None:
         if not is_list_of(backends, str):
-            raise Exception("Backends value has wrong format")
+            raise ValueError("Backends value has wrong format.")

         advisor_parameters["ethos_u_inference_advisor"]["backends"] = backends
@@ -179,7 +179,7 @@ def _get_config_parameters(
         optimization_targets = _DEFAULT_OPTIMIZATION_TARGETS

     if not is_list_of(optimization_targets, dict):
-        raise Exception("Optimization targets value has wrong format")
+        raise ValueError("Optimization targets value has wrong format.")

     advisor_parameters.update(
         {
diff --git a/src/mlia/target/ethos_u/data_collection.py b/src/mlia/target/ethos_u/data_collection.py
index 8348393..0654143 100644
--- a/src/mlia/target/ethos_u/data_collection.py
+++ b/src/mlia/target/ethos_u/data_collection.py
@@ -178,7 +178,7 @@ class EthosUOptimizationPerformance(ContextAwareDataCollector):
     ) -> list[list[OptimizationSettings]]:
         """Parse optimization parameters."""
         if not is_list_of(optimizations, list):
-            raise Exception("Optimization parameters expected to be a list")
+            raise ValueError("Optimization parameters expected to be a list.")

         return [
             [
diff --git a/src/mlia/target/ethos_u/reporters.py b/src/mlia/target/ethos_u/reporters.py
index 00e68b5..711f036 100644
--- a/src/mlia/target/ethos_u/reporters.py
+++ b/src/mlia/target/ethos_u/reporters.py
@@ -390,6 +390,6 @@ def ethos_u_formatters(data: Any) -> Callable[[Any], Report]:
         report = report_tflite_compatiblity
     else:
-        raise Exception(f"Unable to find appropriate formatter for {data}")
+        raise RuntimeError(f"Unable to find appropriate formatter for {data}.")

     return report
diff --git a/src/mlia/target/tosa/advisor.py b/src/mlia/target/tosa/advisor.py
index 7859eca..2d5163e 100644
--- a/src/mlia/target/tosa/advisor.py
+++ b/src/mlia/target/tosa/advisor.py
@@ -44,12 +44,14 @@ class TOSAInferenceAdvisor(DefaultInferenceAdvisor):
             collectors.append(TOSAOperatorCompatibility(model))

         if context.category_enabled(AdviceCategory.PERFORMANCE):
-            raise Exception(
+            raise RuntimeError(
                 "Performance estimation is currently not supported for TOSA."
             )

         if context.category_enabled(AdviceCategory.OPTIMIZATION):
-            raise Exception("Model optimizations are currently not supported for TOSA.")
+            raise RuntimeError(
+                "Model optimizations are currently not supported for TOSA."
+            )

         return collectors
diff --git a/src/mlia/target/tosa/operators.py b/src/mlia/target/tosa/operators.py
index b75ceb0..62f2e76 100644
--- a/src/mlia/target/tosa/operators.py
+++ b/src/mlia/target/tosa/operators.py
@@ -1,11 +1,11 @@
-# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
+# SPDX-FileCopyrightText: Copyright 2022-2023, Arm Limited and/or its affiliates.
 # SPDX-License-Identifier: Apache-2.0
 """Operators module."""


 def report() -> None:
     """Generate supported operators report."""
-    raise Exception(
+    raise NotImplementedError(
         "Generating a supported operators report is not "
         "currently supported with TOSA target profile."
     )
diff --git a/src/mlia/target/tosa/reporters.py b/src/mlia/target/tosa/reporters.py
index e10f047..7b91e94 100644
--- a/src/mlia/target/tosa/reporters.py
+++ b/src/mlia/target/tosa/reporters.py
@@ -168,4 +168,4 @@ def tosa_formatters(data: Any) -> Callable[[Any], Report]:
     if isinstance(data, TFLiteCompatibilityInfo):
         return report_tflite_compatiblity

-    raise Exception(f"Unable to find appropriate formatter for {data}")
+    raise RuntimeError(f"Unable to find appropriate formatter for {data}.")
diff --git a/src/mlia/utils/console.py b/src/mlia/utils/console.py
index 57e3ba2..1f432fb 100644
--- a/src/mlia/utils/console.py
+++ b/src/mlia/utils/console.py
@@ -77,7 +77,7 @@ def _get_table(table_style: str) -> Table:
     if table_style == "no_borders":
         return Table(show_header=False, box=None)

-    raise Exception(f"Unsupported table style {table_style}")
+    raise ValueError(f"Table style {table_style} is not supported.")


 def _convert_to_text(*renderables: RenderableType) -> str:
diff --git a/src/mlia/utils/logging.py b/src/mlia/utils/logging.py
index 17f6cae..07b16df 100644
--- a/src/mlia/utils/logging.py
+++ b/src/mlia/utils/logging.py
@@ -181,7 +181,7 @@ def create_log_handler(
         handler = logging.StreamHandler(stream)

     if handler is None:
-        raise Exception("Unable to create logging handler")
+        raise RuntimeError("Unable to create logging handler.")

     if log_level:
         handler.setLevel(log_level)
--
cgit v1.2.1