author     Dwight Lidman <dwight.lidman@arm.com>          2020-12-08 17:56:44 +0100
committer  patrik.gustavsson <patrik.gustavsson@arm.com>  2020-12-14 07:00:24 +0000
commit     9b43f846b144d39bfb0cf16853bf6901c74b6672 (patch)
tree       a530dce790bb8e54dad009e11ca4d49d54b52b1d /ethosu/vela/lut.py
parent     94457b175b8646bce089c9924e99686587de8992 (diff)
MLBEDSW-3653: Fix type errors in annotated files
This commit corrects a number of type errors reported by mypy and refactors some parts of the code that are no longer necessary after the adjustments made to satisfy mypy.

Signed-off-by: Dwight Lidman <dwight.lidman@arm.com>
Change-Id: I16b880b228e57f2a92fb8936f53e94886e0f9f44
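The substance of the fix is visible in the diff below: commands in the high-level command stream were previously distinguished by comparing a cmdtype enum tag, and the commit replaces those comparisons with isinstance() checks against the DMA and NpuStripe classes. The sketch that follows is a minimal illustration of why this satisfies mypy; the class names DMA and NpuStripe come from the diff's imports, while the Command base class, the attributes, and the helper function are hypothetical stand-ins, not repository code.

from typing import List, Optional


class Command:
    """Hypothetical base class standing in for the real high-level command type."""


class DMA(Command):
    def __init__(self, purpose: str) -> None:
        self.purpose = purpose  # illustrative stand-in for cmd.out_tensor.purpose


class NpuStripe(Command):
    def __init__(self, lut_tensor: Optional[str] = None) -> None:
        self.lut_tensor = lut_tensor


def lut_dmas(stream: List[Command]) -> List[DMA]:
    # isinstance() narrows cmd from Command to DMA for the type checker, so
    # accessing DMA-only attributes inside the branch is provably safe. A tag
    # comparison such as `cmd.cmdtype == CommandType.DMA` gives mypy no
    # narrowing information, which is the kind of error this commit fixes.
    return [cmd for cmd in stream if isinstance(cmd, DMA) and cmd.purpose == "LUT"]


print(len(lut_dmas([NpuStripe(), DMA("LUT"), DMA("FeatureMap")])))  # -> 1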
Diffstat (limited to 'ethosu/vela/lut.py')
-rw-r--r--  ethosu/vela/lut.py  7
1 file changed, 4 insertions, 3 deletions
diff --git a/ethosu/vela/lut.py b/ethosu/vela/lut.py
index 8e28b953..8a23b51d 100644
--- a/ethosu/vela/lut.py
+++ b/ethosu/vela/lut.py
@@ -20,7 +20,8 @@ import uuid
import numpy as np
from . import numeric_util
-from .high_level_command_stream import CommandType
+from .high_level_command_stream import DMA
+from .high_level_command_stream import NpuStripe
from .tensor import create_const_tensor
from .tensor import create_equivalence_id
from .tensor import TensorPurpose
@@ -101,11 +102,11 @@ def optimize_high_level_cmd_stream(sg, arch):
lut_start = arch.shram_lut_address
lut_end = lut_start + arch.shram_lut_size
for cmd in sg.high_level_command_stream:
- if cmd.cmdtype == CommandType.NpuStripe and cmd.ps.lut_tensor is None and arch.shram_reserved_unused_banks == 0:
+ if isinstance(cmd, NpuStripe) and cmd.ps.lut_tensor is None and arch.shram_reserved_unused_banks == 0:
# The command overwrites the last 2 banks containing the LUT; next LUT operation will require DMA
# TODO: check the command's SHRAM usage in more detail to determine if the LUT is overwritten or not
lut_state = LUTState()
- if cmd.cmdtype != CommandType.DMA or cmd.out_tensor.purpose != TensorPurpose.LUT:
+ if not isinstance(cmd, DMA) or cmd.out_tensor.purpose != TensorPurpose.LUT:
# Non-LUT operation; leave untouched
cmd_stream.append(cmd)
continue
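Putting the two rewritten checks together, the loop's visible control flow reduces to the hedged restatement below. It reuses the Command/DMA/NpuStripe stubs from the sketch above, collapses the arch.shram_reserved_unused_banks condition and the LUTState bookkeeping into comments, and stops where the hunk stops; the code that actually decides whether a LUT-loading DMA can be elided lies past the end of this diff.

def optimize_high_level_cmd_stream_sketch(stream: List[Command]) -> List[Command]:
    cmd_stream: List[Command] = []
    for cmd in stream:
        if isinstance(cmd, NpuStripe) and cmd.lut_tensor is None:
            # The stripe may overwrite the SHRAM banks holding the LUT, so
            # the cached LUT state is discarded (LUTState() in the diff).
            pass  # reset LUT bookkeeping here
        if not isinstance(cmd, DMA) or cmd.purpose != "LUT":
            # Non-LUT operation; leave untouched.
            cmd_stream.append(cmd)
            continue
        # Only LUT-loading DMA commands reach this point; the real function
        # continues past the end of the hunk to handle them.
        cmd_stream.append(cmd)
    return cmd_stream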