From ecc4264b93d4a89fa2cb40518b225d8371b7ffad Mon Sep 17 00:00:00 2001
From: Benjamin Klimczak
Date: Wed, 12 Jul 2023 15:18:26 +0100
Subject: Enable rewrites for quantized input models

If the input model for rewriting is quantized:
- Record de-quantized TFRecords
- Enable writing de-quantized calibration data for the training
- Re-generate augmented training data, if needed
- Use quantization-aware training (QAT) to train the replacement models
- Check whether the replacement model is quantized:
  If the source model is quantized, make sure the rewrite's output model
  is quantized too. Currently only int8 is supported, so an error is
  raised if any other data type is present in the output.

Resolves: MLIA-907, MLIA-908, MLIA-927

Signed-off-by: Benjamin Klimczak
Change-Id: Icb4070a9e6f1fdb5ce36120d73823986e89ac955
---
 tests/test_nn_tensorflow_config.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

(limited to 'tests/test_nn_tensorflow_config.py')

diff --git a/tests/test_nn_tensorflow_config.py b/tests/test_nn_tensorflow_config.py
index 48aec0a..fff3857 100644
--- a/tests/test_nn_tensorflow_config.py
+++ b/tests/test_nn_tensorflow_config.py
@@ -111,3 +111,15 @@ def test_tflite_model_call(
     for named_input in data.as_numpy_iterator():
         res = model(named_input)
         assert res
+
+
+def test_tflite_model_is_tensor_quantized(test_tflite_model: Path) -> None:
+    """Test function TFLiteModel.is_tensor_quantized()."""
+    model = TFLiteModel(test_tflite_model)
+    input_details = model.input_details[0]
+    assert model.is_tensor_quantized(name=input_details["name"])
+    assert model.is_tensor_quantized(idx=input_details["index"])
+    with pytest.raises(ValueError):
+        assert model.is_tensor_quantized()
+    with pytest.raises(NameError):
+        assert model.is_tensor_quantized(name="NAME_DOES_NOT_EXIST")
--
cgit v1.2.1
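
Editor's note: the test above exercises TFLiteModel.is_tensor_quantized(), whose
implementation is not shown in this excerpt. The following is a minimal sketch of
how such a check could be written against the public tf.lite.Interpreter API; the
helper name and its signature are illustrative assumptions, not the actual MLIA
code.

    # Illustrative sketch only, not part of the patch above. It shows how a
    # tensor-quantization check with the same name/idx/error behaviour as the
    # tested API could be built on tf.lite.Interpreter. Names other than the
    # TensorFlow APIs are assumptions for illustration.
    from pathlib import Path
    from typing import Optional

    import numpy as np
    import tensorflow as tf


    def is_tensor_quantized_sketch(
        tflite_path: Path,
        name: Optional[str] = None,
        idx: Optional[int] = None,
    ) -> bool:
        """Return True if the selected tensor has non-trivial quantization scales."""
        interpreter = tf.lite.Interpreter(model_path=str(tflite_path))
        interpreter.allocate_tensors()
        tensors = interpreter.get_tensor_details()

        if name is not None:
            matching = [t for t in tensors if t["name"] == name]
            if not matching:
                raise NameError(f"Tensor with name '{name}' not found.")
            tensor = matching[0]
        elif idx is not None:
            matching = [t for t in tensors if t["index"] == idx]
            if not matching:
                raise NameError(f"Tensor with index {idx} not found.")
            tensor = matching[0]
        else:
            raise ValueError("Either 'name' or 'idx' must be given.")

        # Float tensors report empty (or all-zero) scales; quantized tensors have
        # at least one non-zero scale in their quantization parameters.
        scales = np.asarray(tensor["quantization_parameters"]["scales"])
        return bool(scales.size > 0 and np.any(scales != 0))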