From 3c07c97e0202c1cf01eba06c24b37a8f15ff7a7c Mon Sep 17 00:00:00 2001 From: Louis Verhaard Date: Thu, 7 May 2020 08:12:58 +0200 Subject: MLBEDSW-1941: Bug fix shared weights If the same weight tensor was used with different block configs, errors would occur. Fixed by always cloning weight tensors, using a global weight compression cache and modifying the linear allocator to detect multiple usages of the same weight compression. Change-Id: I91ca59176e1c59c66e0ac7a4227f2b5f0b47053f Signed-off-by: Louis Verhaard --- ethosu/vela/nn_graph.py | 1 + 1 file changed, 1 insertion(+) (limited to 'ethosu/vela/nn_graph.py') diff --git a/ethosu/vela/nn_graph.py b/ethosu/vela/nn_graph.py index ed2ab322..ea35c087 100644 --- a/ethosu/vela/nn_graph.py +++ b/ethosu/vela/nn_graph.py @@ -485,6 +485,7 @@ class Graph: self.bits_per_element = {} self.total_size = {} self.total_elements = {} + self.weight_cache = None # See CompressedWeightCache def get_root_subgraph(self): return self.subgraphs[0] -- cgit v1.2.1