# SPDX-FileCopyrightText: Copyright 2022, Arm Limited and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0
"""Module for the API functions."""
import logging
from pathlib import Path
from typing import Any
from typing import Dict
from typing import List
from typing import Literal
from typing import Optional
from typing import Union

from mlia.core._typing import PathOrFileLike
from mlia.core.advisor import InferenceAdvisor
from mlia.core.common import AdviceCategory
from mlia.core.context import ExecutionContext
from mlia.core.events import EventHandler
from mlia.devices.ethosu.advisor import EthosUInferenceAdvisor
from mlia.devices.ethosu.handlers import EthosUEventHandler


logger = logging.getLogger(__name__)


# Default optimization targets used when the caller does not provide any.
_DEFAULT_OPTIMIZATION_TARGETS = [
    {
        "optimization_type": "pruning",
        "optimization_target": 0.5,
        "layers_to_optimize": None,
    },
    {
        "optimization_type": "clustering",
        "optimization_target": 32,
        "layers_to_optimize": None,
    },
]


def get_advice(
    target_profile: str,
    model: Union[Path, str],
    category: Literal["all", "operators", "performance", "optimization"] = "all",
    optimization_targets: Optional[List[Dict[str, Any]]] = None,
    working_dir: Union[str, Path] = "mlia_output",
    output: Optional[PathOrFileLike] = None,
    context: Optional[ExecutionContext] = None,
    backends: Optional[List[str]] = None,
) -> None:
    """Get the advice.

    This function represents an entry point to the library API.

    Based on the provided parameters it will collect and analyze the data
    and produce the advice.

    :param target_profile: target profile identifier
    :param model: path to the NN model
    :param category: category of the advice. MLIA supports four categories:
           "all", "operators", "performance", "optimization". If not provided,
           the category "all" is used by default.
    :param optimization_targets: optional model optimization targets that
           could be used for generating advice in the categories
           "all" and "optimization"
    :param working_dir: path to the directory that will be used for storing
           intermediate files during execution (e.g. converted models)
    :param output: path to the report file. If provided, MLIA will save the
           report in this location. The format of the report is detected
           automatically based on the file extension.
    :param context: optional parameter representing the execution context;
           could be used for advanced use cases
    :param backends: list of backends that should be used for the given
           target. Default settings are used if None.


    Examples:
        NB: Before launching MLIA, the logging functionality should be configured!

        Getting the advice for the provided target profile and the model

        >>> get_advice("ethos-u55-256", "path/to/the/model")

        Getting the advice for the category "performance" and saving the
        resulting report to the file "report.json"

        >>> get_advice("ethos-u55-256", "path/to/the/model", "performance",
                       output="report.json")

    """
    advice_category = AdviceCategory.from_string(category)
    config_parameters = _get_config_parameters(
        model, target_profile, backends, optimization_targets
    )
    event_handlers = _get_event_handlers(output)

    if context is None:
        context = ExecutionContext(
            advice_category=advice_category,
            working_dir=working_dir,
            config_parameters=config_parameters,
            event_handlers=event_handlers,
        )
    else:
        # Fill in only the parts of the caller-supplied context that are unset.
        if context.advice_category is None:
            context.advice_category = advice_category

        if context.config_parameters is None:
            context.config_parameters = config_parameters

        if context.event_handlers is None:
            context.event_handlers = event_handlers

    advisor = _get_advisor(target_profile)
    advisor.run(context)


def _get_advisor(target: Optional[str]) -> InferenceAdvisor:
    """Find appropriate advisor for the target."""
    if not target:
        raise Exception("Target is not provided")

    return EthosUInferenceAdvisor()


def _get_config_parameters(
    model: Union[Path, str],
    target_profile: str,
    backends: Optional[List[str]],
    optimization_targets: Optional[List[Dict[str, Any]]],
) -> Dict[str, Any]:
    """Get configuration parameters for the advisor."""
    advisor_parameters: Dict[str, Any] = {
        "ethos_u_inference_advisor": {
            "model": model,
            "device": {
                "target_profile": target_profile,
            },
        },
    }
    # Specifying backends is optional (default is used)
    if backends is not None:
        advisor_parameters["ethos_u_inference_advisor"]["backends"] = backends

    if not optimization_targets:
        optimization_targets = _DEFAULT_OPTIMIZATION_TARGETS

    # "optimizations" holds a list of target lists; the provided targets
    # are wrapped as a single entry.
    advisor_parameters.update(
        {
            "ethos_u_model_optimizations": {
                "optimizations": [
                    optimization_targets,
                ],
            },
        }
    )

    return advisor_parameters


def _get_event_handlers(output: Optional[PathOrFileLike]) -> List[EventHandler]:
    """Return list of the event handlers."""
    return [EthosUEventHandler(output)]
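

if __name__ == "__main__":
    # Illustrative usage sketch: configure logging before calling get_advice(),
    # as noted in its docstring. The target profile, model path and output
    # file below are placeholder values taken from the docstring examples.
    logging.basicConfig(level=logging.INFO)

    get_advice(
        "ethos-u55-256",
        "path/to/the/model",
        "performance",
        output="report.json",
    )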