about summary refs log tree commit diff
path: root/src/mlia/cli
diff options
context:
space:
mode:
author	Dmitrii Agibov <dmitrii.agibov@arm.com>	2022-10-07 11:39:37 +0100
committer	Dmitrii Agibov <dmitrii.agibov@arm.com>	2022-10-07 11:40:21 +0100
commit	3083f7ee68ce08147db08fca2474e5f4712fc8d7 (patch)
tree	c52e668c01a6a1041c08190e52a15944fd65b453 /src/mlia/cli
parent	bb7fb49484bb3687041061b2fdbbfae3959be54b (diff)
download	mlia-3083f7ee68ce08147db08fca2474e5f4712fc8d7.tar.gz
MLIA-607 Update documentation and comments
Use "TensorFlow Lite" instead of "TFLite" in documentation and comments

Change-Id: Ie4450d72fb2e5261d152d72ab8bd94c3da914c46
Diffstat (limited to 'src/mlia/cli')
-rw-r--r--	src/mlia/cli/commands.py	14
-rw-r--r--	src/mlia/cli/options.py	10
2 files changed, 13 insertions, 11 deletions
diff --git a/src/mlia/cli/commands.py b/src/mlia/cli/commands.py
index 5dd39f9..e044e1a 100644
--- a/src/mlia/cli/commands.py
+++ b/src/mlia/cli/commands.py
@@ -50,7 +50,7 @@ def all_tests(
This command runs a series of tests in order to generate a
comprehensive report/advice:
- - converts the input Keras model into TFLite format
+ - converts the input Keras model into TensorFlow Lite format
- checks the model for operator compatibility on the specified device
- applies optimizations to the model and estimates the resulting performance
on both the original and the optimized models
@@ -112,14 +112,14 @@ def operators(
:param ctx: execution context
:param target_profile: target profile identifier. Will load appropriate parameters
from the profile.json file based on this argument.
- :param model: path to the model, which can be TFLite or Keras
+ :param model: path to the model, which can be TensorFlow Lite or Keras
:param output: path to the file where the report will be saved
:param supported_ops_report: if True then generates supported operators
report in current directory and exits
Example:
Run command for the target profile ethos-u55-256 and the provided
- TFLite model and print report on the standard output
+ TensorFlow Lite model and print report on the standard output
>>> from mlia.api import ExecutionContext
>>> from mlia.cli.logging import setup_logging
@@ -161,13 +161,13 @@ def performance(
:param ctx: execution context
:param target_profile: target profile identifier. Will load appropriate parameters
from the profile.json file based on this argument.
- :param model: path to the model, which can be TFLite or Keras
+ :param model: path to the model, which can be TensorFlow Lite or Keras
:param output: path to the file where the report will be saved
:param evaluate_on: list of the backends to use for evaluation
Example:
Run command for the target profile ethos-u55-256 and
- the provided TFLite model and print report on the standard output
+ the provided TensorFlow Lite model and print report on the standard output
>>> from mlia.api import ExecutionContext
>>> from mlia.cli.logging import setup_logging
@@ -205,7 +205,7 @@ def optimization(
:param ctx: execution context
:param target: target profile identifier. Will load appropriate parameters
from the profile.json file based on this argument.
- :param model: path to the TFLite model
+ :param model: path to the TensorFlow Lite model
:param optimization_type: list of the optimization techniques separated
by comma, e.g. 'pruning,clustering'
:param optimization_target: list of the corresponding targets for
@@ -217,7 +217,7 @@ def optimization(
Example:
Run command for the target profile ethos-u55-256 and
- the provided TFLite model and print report on the standard output
+ the provided TensorFlow Lite model and print report on the standard output
>>> from mlia.cli.logging import setup_logging
>>> setup_logging()
diff --git a/src/mlia/cli/options.py b/src/mlia/cli/options.py
index f7f95c0..e5e85f0 100644
--- a/src/mlia/cli/options.py
+++ b/src/mlia/cli/options.py
@@ -62,15 +62,17 @@ def add_multi_optimization_options(parser: argparse.ArgumentParser) -> None:
def add_optional_tflite_model_options(parser: argparse.ArgumentParser) -> None:
"""Add optional model specific options."""
- model_group = parser.add_argument_group("TFLite model options")
+ model_group = parser.add_argument_group("TensorFlow Lite model options")
# make model parameter optional
- model_group.add_argument("model", nargs="?", help="TFLite model (optional)")
+ model_group.add_argument(
+ "model", nargs="?", help="TensorFlow Lite model (optional)"
+ )
def add_tflite_model_options(parser: argparse.ArgumentParser) -> None:
"""Add model specific options."""
- model_group = parser.add_argument_group("TFLite model options")
- model_group.add_argument("model", help="TFLite model")
+ model_group = parser.add_argument_group("TensorFlow Lite model options")
+ model_group.add_argument("model", help="TensorFlow Lite model")
def add_output_options(parser: argparse.ArgumentParser) -> None: