path: root/verif/tests
author    Jeremy Johnson <jeremy.johnson@arm.com>  2023-10-09 16:31:13 +0100
committer Jeremy Johnson <jeremy.johnson@arm.com>  2023-10-16 15:08:36 +0100
commit    65ba809d7a8b4ddd0a51f6c76ad0afc5f417de07 (patch)
tree      249926aeeccfb0dac60f27967e5d01001adc5e33 /verif/tests
parent    9c2fe6e129e4d176c3e14f172b92efe985af7c78 (diff)
download  reference_model-65ba809d7a8b4ddd0a51f6c76ad0afc5f417de07.tar.gz
Data generator library python interface added
Added support for using the generate library in the tosa_verif_build_tests and tosa_verif_run_tests tosa tool scripts. Reduced the scope of compliance test creation and verification to the supported type of FP32. Fixed a missing virtual destructor warning in generate_dot_product.h and added a config file for the generate library. Simple pytests are included to check the python interface.

Signed-off-by: Jeremy Johnson <jeremy.johnson@arm.com>
Change-Id: I6cdad9b00660d6ddc8bd07fdea813937fb48626a
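For reference, a minimal usage sketch of the new python interface, pieced together from the tests added below. The library path assumes a local "build" directory as described in the README, and the config dict is an abridged copy of JSON_DATAGEN_DOT_PRODUCT from test_tosa_datagenerator.py (second input tensor omitted for brevity); it is a sketch, not part of this commit:

    from pathlib import Path

    from generator.datagenerator import GenerateLibrary

    # Assumed location of a locally built generate library (see tests below).
    lib_path = Path("build/reference_model") / "libtosa_reference_generate_lib.so"

    # Abridged config, modelled on JSON_DATAGEN_DOT_PRODUCT in the new test file.
    config = {
        "tosa_file": "test.json",
        "ifm_name": ["input-0"],
        "ifm_file": ["input-0.npy"],
        "ofm_name": ["result-0"],
        "ofm_file": ["result-0.npy"],
        "meta": {
            "data_gen": {
                "version": "0.1",
                "tensors": {
                    "input-0": {
                        "generator": "DOT_PRODUCT",
                        "data_type": "FP32",
                        "input_type": "VARIABLE",
                        "shape": [3, 5, 4],
                        "input_pos": 0,
                        "op": "MATMUL",
                        "dot_product_info": {"s": 0, "ks": 4, "acc_type": "FP32"},
                    },
                },
            }
        },
    }

    glib = GenerateLibrary(lib_path)   # raises GenerateError if the library is missing
    glib.set_config(config)
    glib.write_numpy_files(Path("."))  # writes input-0.npy into the given directory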
Diffstat (limited to 'verif/tests')
-rw-r--r--  verif/tests/test_tosa_datagenerator.py    116
-rw-r--r--  verif/tests/test_tosa_refmodel.py           17
-rw-r--r--  verif/tests/test_tosa_run_tests_mocksut.py  23
-rw-r--r--  verif/tests/test_tosa_verifier.py            4
4 files changed, 154 insertions, 6 deletions
diff --git a/verif/tests/test_tosa_datagenerator.py b/verif/tests/test_tosa_datagenerator.py
new file mode 100644
index 0000000..ba0235c
--- /dev/null
+++ b/verif/tests/test_tosa_datagenerator.py
@@ -0,0 +1,116 @@
+"""Tests for the python interface to the data generator library."""
+# Copyright (c) 2023, ARM Limited.
+# SPDX-License-Identifier: Apache-2.0
+from pathlib import Path
+
+import numpy as np
+import pytest
+from generator.datagenerator import GenerateError
+from generator.datagenerator import GenerateLibrary
+
+# NOTE: These tests are marked as POST COMMIT
+# To run them, please build the reference_model in a local "build" directory
+# (as per the README) and run them using: pytest -m "postcommit"
+
+# Location of reference model binaries
+REF_MODEL_BUILD_PATH = Path(__file__).resolve().parents[2] / "build" / "reference_model"
+GENERATE_LIB = "libtosa_reference_generate_lib.so"
+GENERATE_LIB_PATH = REF_MODEL_BUILD_PATH / GENERATE_LIB
+
+TEST_DIR = Path(__file__).parent
+
+
+@pytest.mark.postcommit
+def test_generate_lib_built():
+ """First test to check the library has been built."""
+ assert GENERATE_LIB_PATH.is_file()
+
+
+@pytest.mark.postcommit
+def test_checker_generate_load_fail():
+ with pytest.raises(GenerateError) as excinfo:
+ GenerateLibrary(Path("/place-that-does-not-exist"))
+ assert str(excinfo.value).startswith("Could not find generate library")
+
+
+@pytest.mark.postcommit
+def test_checker_generate_load():
+ glib = GenerateLibrary(GENERATE_LIB_PATH)
+ assert glib
+
+
+JSON_DATAGEN_DOT_PRODUCT = {
+ "tosa_file": "test.json",
+ "ifm_name": ["input-0", "input-1"],
+ "ifm_file": ["input-0.npy", "input-1.npy"],
+ "ofm_name": ["result-0"],
+ "ofm_file": ["result-0.npy"],
+ "meta": {
+ "data_gen": {
+ "version": "0.1",
+ "tensors": {
+ "input-0": {
+ "generator": "DOT_PRODUCT",
+ "data_type": "FP32",
+ "input_type": "VARIABLE",
+ "shape": [3, 5, 4],
+ "input_pos": 0,
+ "op": "MATMUL",
+ "dot_product_info": {"s": 0, "ks": 4, "acc_type": "FP32"},
+ },
+ "input-1": {
+ "generator": "DOT_PRODUCT",
+ "data_type": "FP32",
+ "input_type": "VARIABLE",
+ "shape": [3, 4, 6],
+ "input_pos": 1,
+ "op": "MATMUL",
+ "dot_product_info": {"s": 0, "ks": 4, "acc_type": "FP32"},
+ },
+ },
+ }
+ },
+}
+
+
+@pytest.mark.postcommit
+def test_generate_dot_product_check():
+ glib = GenerateLibrary(GENERATE_LIB_PATH)
+ assert glib
+
+ json_config = JSON_DATAGEN_DOT_PRODUCT
+ glib.set_config(json_config)
+
+ glib.write_numpy_files(TEST_DIR)
+
+ # Test the files exist and are the expected numpy files
+ for f, n in zip(json_config["ifm_file"], json_config["ifm_name"]):
+ file = TEST_DIR / f
+ assert file.is_file()
+ arr = np.load(file)
+ assert arr.shape == tuple(
+ json_config["meta"]["data_gen"]["tensors"][n]["shape"]
+ )
+ assert arr.dtype == np.float32
+ file.unlink()
+
+
+@pytest.mark.postcommit
+def test_generate_dot_product_check_fail_names():
+ glib = GenerateLibrary(GENERATE_LIB_PATH)
+ assert glib
+
+ # Fix up the JSON to have the wrong names
+ json_config = JSON_DATAGEN_DOT_PRODUCT.copy()
+ json_config["ifm_name"] = ["not-input0", "not-input1"]
+ glib.set_config(json_config)
+
+ with pytest.raises(GenerateError) as excinfo:
+ glib.write_numpy_files(TEST_DIR)
+ info = str(excinfo.value).split("\n")
+ for i, n in enumerate(json_config["ifm_name"]):
+ assert info[i].startswith(f"ERROR: Failed to create data for tensor {n}")
+
+ for f in json_config["ifm_file"]:
+ file = TEST_DIR / f
+ assert not file.is_file()
diff --git a/verif/tests/test_tosa_refmodel.py b/verif/tests/test_tosa_refmodel.py
index 675a534..24ee9e2 100644
--- a/verif/tests/test_tosa_refmodel.py
+++ b/verif/tests/test_tosa_refmodel.py
@@ -6,6 +6,7 @@ import re
from pathlib import Path
from shutil import rmtree
+import conformance.model_files as cmf
import numpy as np
import pytest
from checker.tosa_result_checker import test_check as tosa_check
@@ -17,9 +18,13 @@ from runner.run_command import RunShCommandError
# Note: Must rename imports (like test_check) so that pytest doesn't assume it's a test function/class
# Location of reference model binaries
-REF_MODEL_BUILD_PATH = Path(__file__).resolve().parents[2] / "build" / "reference_model"
-REF_MODEL_EXE = "tosa_reference_model"
-REF_MODEL_EXE_PATH = REF_MODEL_BUILD_PATH / REF_MODEL_EXE
+REF_MODEL_DIR = Path(__file__).resolve().parents[2]
+REF_MODEL_EXE_PATH = cmf.find_tosa_file(
+ cmf.TosaFileType.REF_MODEL, REF_MODEL_DIR, False
+)
+GENERATE_LIB_PATH = cmf.find_tosa_file(
+ cmf.TosaFileType.GENERATE_LIBRARY, REF_MODEL_EXE_PATH
+)
# Set this to False if you want to preserve the test directories after running
CLEAN_UP_TESTS = True
@@ -51,7 +56,9 @@ REF_MODEL_TYPE_TO_OUT = {
"bf16": "bf16",
}
-# NOTE: These tests are set to POST COMMIT - so will only run on the CI
+# NOTE: These tests are marked as POST COMMIT
+# To run them, please build the reference_model in a local "build" directory
+# (as per the README) and run them using: pytest -m "postcommit"
@pytest.mark.postcommit
@@ -83,6 +90,8 @@ class BuildTosaTest:
# Generate tests without any zero-point
build_args = [
+ "--generate-lib-path",
+ str(GENERATE_LIB_PATH),
"--filter",
self.op_name,
"--target-shape",
diff --git a/verif/tests/test_tosa_run_tests_mocksut.py b/verif/tests/test_tosa_run_tests_mocksut.py
index fb4a811..f4437b2 100644
--- a/verif/tests/test_tosa_run_tests_mocksut.py
+++ b/verif/tests/test_tosa_run_tests_mocksut.py
@@ -1,4 +1,4 @@
-"""Tests for tosa_verif_run_tests.py."""
+"""Mock SUT tests for tosa_verif_run_tests.py."""
# Copyright (c) 2021-2023, ARM Limited.
# SPDX-License-Identifier: Apache-2.0
import json
@@ -55,16 +55,33 @@ def _delete_desc_json(file: Path):
file.unlink()
+def _create_ifm_files(files):
+ """Create empty input files."""
+ for name in files:
+ file = Path(__file__).parent / name
+ with open(file, "w") as fd:
+ fd.write("empty")
+
+
+def _delete_ifm_files(files):
+ """Delete empty input files."""
+ for name in files:
+ file = Path(__file__).parent / name
+ file.unlink()
+
+
@pytest.fixture
def testDir() -> str:
"""Set up a mock expected pass test."""
print("SET UP - testDir")
_create_fake_ref_model()
+ _create_ifm_files(TEST_DESC["ifm_file"])
file = _create_desc_json(TEST_DESC)
yield file.parent
print("TEAR DOWN - testDir")
_delete_desc_json(file)
_delete_fake_ref_model()
+ _delete_ifm_files(TEST_DESC["ifm_file"])
@pytest.fixture
@@ -74,11 +91,13 @@ def testDirExpectedFail() -> str:
_create_fake_ref_model()
fail = deepcopy(TEST_DESC)
fail["expected_failure"] = True
+ _create_ifm_files(TEST_DESC["ifm_file"])
file = _create_desc_json(fail)
yield file.parent
print("TEAR DOWN - testDirExpectedFail")
_delete_desc_json(file)
_delete_fake_ref_model()
+ _delete_ifm_files(TEST_DESC["ifm_file"])
@pytest.fixture
@@ -89,11 +108,13 @@ def testDirMultiOutputs() -> str:
out = deepcopy(TEST_DESC)
out["ofm_name"].append("tr1")
out["ofm_file"].append("test-result-1.npy")
+ _create_ifm_files(TEST_DESC["ifm_file"])
file = _create_desc_json(out)
yield file.parent
print("TEAR DOWN - testDirMultiOutputs")
_delete_desc_json(file)
_delete_fake_ref_model()
+ _delete_ifm_files(TEST_DESC["ifm_file"])
def _get_default_argv(testDir: Path, graphResult: str) -> list:
diff --git a/verif/tests/test_tosa_verifier.py b/verif/tests/test_tosa_verifier.py
index 864fa9c..a29f983 100644
--- a/verif/tests/test_tosa_verifier.py
+++ b/verif/tests/test_tosa_verifier.py
@@ -8,7 +8,9 @@ import pytest
from checker.verifier import VerifierError
from checker.verifier import VerifierLibrary
-# NOTE: These tests are set to POST COMMIT - so will only run on the CI
+# NOTE: These tests are marked as POST COMMIT
+# To run them, please build the reference_model in a local "build" directory
+# (as per the README) and run them using: pytest -m "postcommit"
# Location of reference model binaries
REF_MODEL_BUILD_PATH = Path(__file__).resolve().parents[2] / "build" / "reference_model"
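
As a closing note, the postcommit tests touched by this change can also be selected programmatically rather than via the command line mentioned in the comments above. A minimal sketch, assuming pytest is installed and the reference_model has been built in a local "build" directory:

    import sys

    import pytest

    # Run only the tests marked "postcommit" under verif/tests
    # (they require a local build of the reference_model, as noted above).
    sys.exit(pytest.main(["-m", "postcommit", "verif/tests"]))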