about summary refs log tree commit diff
path: root/src/mlia/cli/main.py
diff options
context:
space:
mode:
authorBenjamin Klimczak <benjamin.klimczak@arm.com>2022-12-23 17:26:09 +0000
committerBenjamin Klimczak <benjamin.klimczak@arm.com>2023-02-08 15:17:02 +0000
commit6fbcffb4ee039438a409fbc92e38fa5d1d118833 (patch)
tree9e2268059a6963201efc8b980d79104337155f2f /src/mlia/cli/main.py
parentc6cfc78d5245c550016b9709686d2b32ab3fcd5b (diff)
downloadmlia-6fbcffb4ee039438a409fbc92e38fa5d1d118833.tar.gz
MLIA-737 Show connection between target / backend
- The help text of MLIA now shows a table of supported targets, backends and advice.
- The table is only shown with the help message and not when MLIA is run normally.

Change-Id: I3234ce91e943de4b08b9471bd95a474df34755f7
Diffstat (limited to 'src/mlia/cli/main.py')
-rw-r--r--src/mlia/cli/main.py15
1 file changed, 5 insertions, 10 deletions
diff --git a/src/mlia/cli/main.py b/src/mlia/cli/main.py
index 2b63124..4a91b08 100644
--- a/src/mlia/cli/main.py
+++ b/src/mlia/cli/main.py
@@ -11,7 +11,6 @@ from inspect import signature
from mlia import __version__
from mlia.backend.errors import BackendUnavailableError
-from mlia.backend.registry import registry as backend_registry
from mlia.cli.commands import backend_install
from mlia.cli.commands import backend_list
from mlia.cli.commands import backend_uninstall
@@ -35,7 +34,7 @@ from mlia.core.context import ExecutionContext
from mlia.core.errors import ConfigurationError
from mlia.core.errors import InternalError
from mlia.core.logging import setup_logging
-from mlia.target.registry import registry as target_registry
+from mlia.target.registry import table as target_table
logger = logging.getLogger(__name__)
@@ -43,14 +42,10 @@ logger = logging.getLogger(__name__)
INFO_MESSAGE = f"""
ML Inference Advisor {__version__}
-Help the design and optimization of neural network models for efficient inference on a target CPU and NPU
-
-Supported targets:
-{target_registry}
-
-Supported backends:
-{backend_registry}
+Help the design and optimization of neural network models for efficient inference on a target CPU or NPU.
+{target_table().to_plain_text(show_title=True, space=False)}
+Use command 'mlia-backend' to install backends.
""".strip()
@@ -192,7 +187,7 @@ def run_command(args: argparse.Namespace) -> int:
)
try:
- logger.info(INFO_MESSAGE)
+ logger.info("ML Inference Advisor %s", __version__)
logger.info(
"\nThis execution of MLIA uses output directory: %s", ctx.output_dir
)