From d73d14fd77fe1405a33b3ecf3c56e1ac65647ff7 Mon Sep 17 00:00:00 2001 From: Ferran Balaguer Date: Mon, 10 Jun 2019 10:29:54 +0100 Subject: IVGCVSW-3229 Refactor L2Normalization workload to support multiple data types Signed-off-by: Ferran Balaguer Change-Id: I848056aad4b172d432664633eea000843d85a85d --- src/backends/backendsCommon/test/LayerTests.cpp | 302 +++++++++++++++--------- src/backends/backendsCommon/test/LayerTests.hpp | 20 ++ 2 files changed, 216 insertions(+), 106 deletions(-) (limited to 'src/backends/backendsCommon/test') diff --git a/src/backends/backendsCommon/test/LayerTests.cpp b/src/backends/backendsCommon/test/LayerTests.cpp index af426a470b..3216ac68ef 100644 --- a/src/backends/backendsCommon/test/LayerTests.cpp +++ b/src/backends/backendsCommon/test/LayerTests.cpp @@ -5283,17 +5283,19 @@ LayerTestResult FakeQuantizationTest( namespace { - -LayerTestResult L2NormalizationTestImpl( +template> +LayerTestResult L2NormalizationTestImpl( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, + float qScale, + int32_t qOffset, const armnn::TensorShape& inputOutputTensorShape, const std::vector& inputValues, const std::vector& expectedOutputValues, const armnn::DataLayout layout) { - const armnn::TensorInfo inputTensorInfo(inputOutputTensorShape, armnn::DataType::Float32); - const armnn::TensorInfo outputTensorInfo(inputOutputTensorShape, armnn::DataType::Float32); + const armnn::TensorInfo inputTensorInfo(inputOutputTensorShape, ArmnnType, qScale, qOffset); + const armnn::TensorInfo outputTensorInfo(inputOutputTensorShape, ArmnnType, qScale, qOffset); // at this point if we require it permute the input data const armnn::PermutationVector NCHWToNHWC = { 0, 3, 1, 2 }; @@ -5305,18 +5307,25 @@ LayerTestResult L2NormalizationTestImpl( inputData = tmp; } - auto inputTensor = MakeTensor(inputTensorInfo, std::vector(inputData)); + auto inputTensor = MakeTensor(inputTensorInfo, QuantizedVector( + inputTensorInfo.GetQuantizationScale(), + inputTensorInfo.GetQuantizationOffset(), + inputData)); - LayerTestResult result(outputTensorInfo); std::vector expectedOutputData = expectedOutputValues; if (layout == armnn::DataLayout::NHWC) { std::vector tmp(expectedOutputData.size()); - armnnUtils::Permute(inputTensorInfo.GetShape(), NCHWToNHWC, - expectedOutputData.data(), tmp.data(), sizeof(float)); + armnnUtils::Permute(inputTensorInfo.GetShape(), NCHWToNHWC, expectedOutputData.data(), tmp.data(), + sizeof(float)); expectedOutputData = tmp; } - result.outputExpected = MakeTensor(inputTensorInfo, std::vector(expectedOutputData)); + + LayerTestResult result(outputTensorInfo); + result.outputExpected = MakeTensor(outputTensorInfo, QuantizedVector( + outputTensorInfo.GetQuantizationScale(), + outputTensorInfo.GetQuantizationOffset(), + expectedOutputData)); std::unique_ptr inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo); std::unique_ptr outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo); @@ -5786,10 +5795,13 @@ LayerTestResult PadFloat324dTest( return Pad4dTestCommon(workloadFactory, memoryManager, 0.0f, 0); } -LayerTestResult L2Normalization1dTest( - armnn::IWorkloadFactory& workloadFactory, - const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, - const armnn::DataLayout layout) +template> +LayerTestResult L2Normalization1dTestCommon( + armnn::IWorkloadFactory& workloadFactory, + const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, + float qScale, + int32_t qOffset, + 
const armnn::DataLayout layout) { // Width: 1 // Height: 1 @@ -5806,31 +5818,31 @@ LayerTestResult L2Normalization1dTest( std::vector inputValues { // Batch 0, Channel 0, Height (1) x Width (1) - 1.0f, + 1.0f, // Batch 0, Channel 1, Height (1) x Width (1) - 2.0f, + 2.0f, // Batch 0, Channel 2, Height (1) x Width (1) - 3.0f, + 3.0f, // Batch 0, Channel 3, Height (1) x Width (1) - 4.0f, + 4.0f, // Batch 0, Channel 4, Height (1) x Width (1) - 5.0f, + 5.0f, // Batch 0, Channel 5, Height (1) x Width (1) - 6.0f, + 6.0f, // Batch 0, Channel 6, Height (1) x Width (1) - 7.0f, + 7.0f, // Batch 0, Channel 7, Height (1) x Width (1) - 8.0f, + 8.0f, // Batch 0, Channel 8, Height (1) x Width (1) - 9.0f, + 9.0f, // Batch 0, Channel 9, Height (1) x Width (1) 10.0f @@ -5839,27 +5851,48 @@ LayerTestResult L2Normalization1dTest( std::vector expectedOutputValues { // Batch 0, Channel 0, Height (1) x Width (1) - 1.0f * approxInvL2Norm, - 2.0f * approxInvL2Norm, - 3.0f * approxInvL2Norm, - 4.0f * approxInvL2Norm, - 5.0f * approxInvL2Norm, - 6.0f * approxInvL2Norm, - 7.0f * approxInvL2Norm, - 8.0f * approxInvL2Norm, - 9.0f * approxInvL2Norm, + 1.0f * approxInvL2Norm, + 2.0f * approxInvL2Norm, + 3.0f * approxInvL2Norm, + 4.0f * approxInvL2Norm, + 5.0f * approxInvL2Norm, + 6.0f * approxInvL2Norm, + 7.0f * approxInvL2Norm, + 8.0f * approxInvL2Norm, + 9.0f * approxInvL2Norm, 10.0f * approxInvL2Norm }; - return L2NormalizationTestImpl(workloadFactory, memoryManager, inputOutputShape, - inputValues, expectedOutputValues, layout); + return L2NormalizationTestImpl(workloadFactory, memoryManager, qScale, qOffset, inputOutputShape, + inputValues, expectedOutputValues, layout); } -LayerTestResult L2Normalization2dTest( + +LayerTestResult L2Normalization1dTest( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::DataLayout layout) +{ + return L2Normalization1dTestCommon(workloadFactory, memoryManager, 0.f, 0, layout); +} + +LayerTestResult L2Normalization1dInt16Test( + armnn::IWorkloadFactory& workloadFactory, + const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, + const armnn::DataLayout layout) +{ + return L2Normalization1dTestCommon(workloadFactory, memoryManager, 1.f, 0, + layout); +} + +template> +LayerTestResult L2Normalization2dTestCommon( + armnn::IWorkloadFactory& workloadFactory, + const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, + float qScale, + int32_t qOffset, + const armnn::DataLayout layout) { // Width: 5 // Height: 1 @@ -5883,28 +5916,48 @@ LayerTestResult L2Normalization2dTest( std::vector expectedOutputValues { // Batch 0, Channel 0, Height (1) x Width (5) - 1.0f * CalcInvL2Norm({ 1.0f, 2.0f }), - 3.0f * CalcInvL2Norm({ 3.0f, 4.0f }), - 5.0f * CalcInvL2Norm({ 5.0f, 6.0f }), - 7.0f * CalcInvL2Norm({ 7.0f, 8.0f }), - 9.0f * CalcInvL2Norm({ 9.0f, 10.0f }), + 1.0f * CalcInvL2Norm({ 1.0f, 2.0f }), + 3.0f * CalcInvL2Norm({ 3.0f, 4.0f }), + 5.0f * CalcInvL2Norm({ 5.0f, 6.0f }), + 7.0f * CalcInvL2Norm({ 7.0f, 8.0f }), + 9.0f * CalcInvL2Norm({ 9.0f, 10.0f }), // Batch 0, Channel 1, Height (1) x Width (5) - 2.0f * CalcInvL2Norm({ 1.0f, 2.0f }), - 4.0f * CalcInvL2Norm({ 3.0f, 4.0f }), - 6.0f * CalcInvL2Norm({ 5.0f, 6.0f }), - 8.0f * CalcInvL2Norm({ 7.0f, 8.0f }), + 2.0f * CalcInvL2Norm({ 1.0f, 2.0f }), + 4.0f * CalcInvL2Norm({ 3.0f, 4.0f }), + 6.0f * CalcInvL2Norm({ 5.0f, 6.0f }), + 8.0f * CalcInvL2Norm({ 7.0f, 8.0f }), 10.0f * CalcInvL2Norm({ 9.0f, 10.0f }) }; - return 
L2NormalizationTestImpl(workloadFactory, memoryManager, inputOutputShape, - inputValues, expectedOutputValues, layout); + return L2NormalizationTestImpl(workloadFactory, memoryManager, qScale, qOffset, inputOutputShape, + inputValues, expectedOutputValues, layout); } -LayerTestResult L2Normalization3dTest( +LayerTestResult L2Normalization2dTest( + armnn::IWorkloadFactory& workloadFactory, + const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, + const armnn::DataLayout layout) +{ + return L2Normalization2dTestCommon(workloadFactory, memoryManager, 0.f, 0, layout); +} + +LayerTestResult L2Normalization2dInt16Test( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::DataLayout layout) +{ + return L2Normalization1dTestCommon(workloadFactory, memoryManager, 1.f, 0, + layout); +} + +template> +LayerTestResult L2Normalization3dTestCommon( + armnn::IWorkloadFactory& workloadFactory, + const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, + float qScale, + int32_t qOffset, + const armnn::DataLayout layout) { // Width: 3 // Height: 4 @@ -5922,25 +5975,25 @@ LayerTestResult L2Normalization3dTest( // Batch 0, Channel 0, Height (4) x Width (3) 119.0f, 21.0f, 150.0f, 149.0f, 32.0f, 179.0f, - 15.0f, 227.0f, 141.0f, + 15.0f, 227.0f, 141.0f, 147.0f, 199.0f, 220.0f, // Batch 0, Channel 1, Height (4) x Width (3) 110.0f, 140.0f, 73.0f, 211.0f, 212.0f, 89.0f, - 24.0f, 138.0f, 188.0f, + 24.0f, 138.0f, 188.0f, 162.0f, 12.0f, 161.0f }; std::vector expectedOutputValues { // Batch 0, Channel 0, Height (4) x Width (3) 119.0f * CalcInvL2Norm({ 119.0f, 110.0f }), - 21.0f * CalcInvL2Norm({ 21.0f, 140.0f }), + 21.0f * CalcInvL2Norm({ 21.0f, 140.0f }), 150.0f * CalcInvL2Norm({ 150.0f, 73.0f }), 149.0f * CalcInvL2Norm({ 149.0f, 211.0f }), - 32.0f * CalcInvL2Norm({ 32.0f, 212.0f }), + 32.0f * CalcInvL2Norm({ 32.0f, 212.0f }), 179.0f * CalcInvL2Norm({ 179.0f, 89.0f }), - 15.0f * CalcInvL2Norm({ 15.0f, 24.0f }), + 15.0f * CalcInvL2Norm({ 15.0f, 24.0f }), 227.0f * CalcInvL2Norm({ 227.0f, 138.0f }), 141.0f * CalcInvL2Norm({ 141.0f, 188.0f }), 147.0f * CalcInvL2Norm({ 147.0f, 162.0f }), @@ -5950,28 +6003,48 @@ LayerTestResult L2Normalization3dTest( // Batch 0, Channel 1, Height (4) x Width (3) 110.0f * CalcInvL2Norm({ 119.0f, 110.0f }), 140.0f * CalcInvL2Norm({ 21.0f, 140.0f }), - 73.0f * CalcInvL2Norm({ 150.0f, 73.0f }), + 73.0f * CalcInvL2Norm({ 150.0f, 73.0f }), 211.0f * CalcInvL2Norm({ 149.0f, 211.0f }), 212.0f * CalcInvL2Norm({ 32.0f, 212.0f }), - 89.0f * CalcInvL2Norm({ 179.0f, 89.0f }), - 24.0f * CalcInvL2Norm({ 15.0f, 24.0f }), + 89.0f * CalcInvL2Norm({ 179.0f, 89.0f }), + 24.0f * CalcInvL2Norm({ 15.0f, 24.0f }), 138.0f * CalcInvL2Norm({ 227.0f, 138.0f }), 188.0f * CalcInvL2Norm({ 141.0f, 188.0f }), 162.0f * CalcInvL2Norm({ 147.0f, 162.0f }), - 12.0f * CalcInvL2Norm({ 199.0f, 12.0f }), + 12.0f * CalcInvL2Norm({ 199.0f, 12.0f }), 161.0f * CalcInvL2Norm({ 220.0f, 161.0f }) }; - return L2NormalizationTestImpl(workloadFactory, memoryManager, inputOutputShape, - inputValues, expectedOutputValues, layout); + return L2NormalizationTestImpl(workloadFactory, memoryManager, qScale, qOffset, inputOutputShape, + inputValues, expectedOutputValues, layout); } -LayerTestResult L2Normalization4dTest( +LayerTestResult L2Normalization3dTest( + armnn::IWorkloadFactory& workloadFactory, + const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, + const armnn::DataLayout layout) +{ + return 
L2Normalization3dTestCommon(workloadFactory, memoryManager, 0.f, 0, layout); +} + +LayerTestResult L2Normalization3dInt16Test( + armnn::IWorkloadFactory& workloadFactory, + const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, + const armnn::DataLayout layout) +{ + return L2Normalization1dTestCommon(workloadFactory, memoryManager, 1.f, 0, + layout); +} + +template> +LayerTestResult L2Normalization4dTestCommon( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, + float qScale, + int32_t qOffset, const armnn::DataLayout layout) { - // Width: 3 + // Width: 3 // Height: 4 // Channels: 3 // BatchSize: 2 @@ -5988,127 +6061,144 @@ LayerTestResult L2Normalization4dTest( 235.0f, 46.0f, 178.0f, 100.0f, 123.0f, 19.0f, 172.0f, 74.0f, 250.0f, - 6.0f, 195.0f, 80.0f, + 6.0f, 195.0f, 80.0f, // Batch 0, Channel 1, Height (4) x Width (3) 113.0f, 95.0f, 202.0f, - 77.0f, 114.0f, 71.0f, + 77.0f, 114.0f, 71.0f, 122.0f, 246.0f, 166.0f, - 82.0f, 28.0f, 37.0f, + 82.0f, 28.0f, 37.0f, // Batch 0, Channel 2, Height (4) x Width (3) - 56.0f, 170.0f, 162.0f, + 56.0f, 170.0f, 162.0f, 194.0f, 89.0f, 254.0f, - 12.0f, 209.0f, 200.0f, - 1.0f, 64.0f, 54.0f, + 12.0f, 209.0f, 200.0f, + 1.0f, 64.0f, 54.0f, // Batch 1, Channel 0, Height (4) x Width (3) - 67.0f, 90.0f, 49.0f, - 7.0f, 163.0f, 18.0f, - 25.0f, 117.0f, 103.0f, + 67.0f, 90.0f, 49.0f, + 7.0f, 163.0f, 18.0f, + 25.0f, 117.0f, 103.0f, 247.0f, 59.0f, 189.0f, // Batch 1, Channel 1, Height (4) x Width (3) 239.0f, 104.0f, 199.0f, - 17.0f, 124.0f, 153.0f, + 17.0f, 124.0f, 153.0f, 222.0f, 217.0f, 75.0f, - 32.0f, 126.0f, 21.0f, + 32.0f, 126.0f, 21.0f, // Batch 1, Channel 2, Height (4) x Width (3) - 97.0f, 145.0f, 215.0f, + 97.0f, 145.0f, 215.0f, 115.0f, 116.0f, 238.0f, 226.0f, 16.0f, 132.0f, - 92.0f, 125.0f, 88.0f + 92.0f, 125.0f, 88.0f }; std::vector expectedOutputValues { // Batch 0, Channel 0, Height (4) x Width (3) 235.0f * CalcInvL2Norm({ 235.0f, 113.0f, 56.0f }), - 46.0f * CalcInvL2Norm({ 46.0f, 95.0f, 170.0f }), + 46.0f * CalcInvL2Norm({ 46.0f, 95.0f, 170.0f }), 178.0f * CalcInvL2Norm({ 178.0f, 202.0F, 162.0f }), 100.0f * CalcInvL2Norm({ 100.0f, 77.0f, 194.0f }), 123.0f * CalcInvL2Norm({ 123.0f, 114.0f, 89.0f }), - 19.0f * CalcInvL2Norm({ 19.0f, 71.0f, 254.0f }), + 19.0f * CalcInvL2Norm({ 19.0f, 71.0f, 254.0f }), 172.0f * CalcInvL2Norm({ 172.0f, 122.0f, 12.0f }), - 74.0f * CalcInvL2Norm({ 74.0f, 246.0f, 209.0f }), + 74.0f * CalcInvL2Norm({ 74.0f, 246.0f, 209.0f }), 250.0f * CalcInvL2Norm({ 250.0f, 166.0f, 200.0f }), - 6.0f * CalcInvL2Norm({ 6.0f, 82.0f, 1.0f }), + 6.0f * CalcInvL2Norm({ 6.0f, 82.0f, 1.0f }), 195.0f * CalcInvL2Norm({ 195.0f, 28.0f, 64.0f }), - 80.0f * CalcInvL2Norm({ 80.0f, 37.0f, 54.0f }), + 80.0f * CalcInvL2Norm({ 80.0f, 37.0f, 54.0f }), // Batch 0, Channel 1, Height (4) x Width (3) 113.0f * CalcInvL2Norm({ 235.0f, 113.0f, 56.0f }), - 95.0f * CalcInvL2Norm({ 46.0f, 95.0f, 170.0f }), + 95.0f * CalcInvL2Norm({ 46.0f, 95.0f, 170.0f }), 202.0f * CalcInvL2Norm({ 178.0f, 202.0F, 162.0f }), - 77.0f * CalcInvL2Norm({ 100.0f, 77.0f, 194.0f }), + 77.0f * CalcInvL2Norm({ 100.0f, 77.0f, 194.0f }), 114.0f * CalcInvL2Norm({ 123.0f, 114.0f, 89.0f }), - 71.0f * CalcInvL2Norm({ 19.0f, 71.0f, 254.0f }), + 71.0f * CalcInvL2Norm({ 19.0f, 71.0f, 254.0f }), 122.0f * CalcInvL2Norm({ 172.0f, 122.0f, 12.0f }), 246.0f * CalcInvL2Norm({ 74.0f, 246.0f, 209.0f }), 166.0f * CalcInvL2Norm({ 250.0f, 166.0f, 200.0f }), - 82.0f * CalcInvL2Norm({ 6.0f, 82.0f, 1.0f }), - 28.0f * CalcInvL2Norm({ 195.0f, 28.0f, 
64.0f }), - 37.0f * CalcInvL2Norm({ 80.0f, 37.0f, 54.0f }), + 82.0f * CalcInvL2Norm({ 6.0f, 82.0f, 1.0f }), + 28.0f * CalcInvL2Norm({ 195.0f, 28.0f, 64.0f }), + 37.0f * CalcInvL2Norm({ 80.0f, 37.0f, 54.0f }), // Batch 0, Channel 2, Height (4) x Width (3) - 56.0f * CalcInvL2Norm({ 235.0f, 113.0f, 56.0f }), + 56.0f * CalcInvL2Norm({ 235.0f, 113.0f, 56.0f }), 170.0f * CalcInvL2Norm({ 46.0f, 95.0f, 170.0f }), 162.0f * CalcInvL2Norm({ 178.0f, 202.0F, 162.0f }), 194.0f * CalcInvL2Norm({ 100.0f, 77.0f, 194.0f }), - 89.0f * CalcInvL2Norm({ 123.0f, 114.0f, 89.0f }), + 89.0f * CalcInvL2Norm({ 123.0f, 114.0f, 89.0f }), 254.0f * CalcInvL2Norm({ 19.0f, 71.0f, 254.0f }), - 12.0f * CalcInvL2Norm({ 172.0f, 122.0f, 12.0f }), + 12.0f * CalcInvL2Norm({ 172.0f, 122.0f, 12.0f }), 209.0f * CalcInvL2Norm({ 74.0f, 246.0f, 209.0f }), 200.0f * CalcInvL2Norm({ 250.0f, 166.0f, 200.0f }), - 1.0f * CalcInvL2Norm({ 6.0f, 82.0f, 1.0f }), - 64.0f * CalcInvL2Norm({ 195.0f, 28.0f, 64.0f }), - 54.0f * CalcInvL2Norm({ 80.0f, 37.0f, 54.0f }), + 1.0f * CalcInvL2Norm({ 6.0f, 82.0f, 1.0f }), + 64.0f * CalcInvL2Norm({ 195.0f, 28.0f, 64.0f }), + 54.0f * CalcInvL2Norm({ 80.0f, 37.0f, 54.0f }), // Batch 1, Channel 0, Height (4) x Width (3) - 67.0f * CalcInvL2Norm({ 67.0f, 239.0f, 97.0f }), - 90.0f * CalcInvL2Norm({ 90.0f, 104.0f, 145.0f }), - 49.0f * CalcInvL2Norm({ 49.0f, 199.0f, 215.0f }), - 7.0f * CalcInvL2Norm({ 7.0f, 17.0f, 115.0f }), + 67.0f * CalcInvL2Norm({ 67.0f, 239.0f, 97.0f }), + 90.0f * CalcInvL2Norm({ 90.0f, 104.0f, 145.0f }), + 49.0f * CalcInvL2Norm({ 49.0f, 199.0f, 215.0f }), + 7.0f * CalcInvL2Norm({ 7.0f, 17.0f, 115.0f }), 163.0f * CalcInvL2Norm({ 163.0f, 124.0f, 116.0f }), - 18.0f * CalcInvL2Norm({ 18.0f, 153.0f, 238.0f }), - 25.0f * CalcInvL2Norm({ 25.0f, 222.0f, 226.0f }), + 18.0f * CalcInvL2Norm({ 18.0f, 153.0f, 238.0f }), + 25.0f * CalcInvL2Norm({ 25.0f, 222.0f, 226.0f }), 117.0f * CalcInvL2Norm({ 117.0f, 217.0f, 16.0f }), 103.0f * CalcInvL2Norm({ 103.0f, 75.0f, 132.0f }), 247.0f * CalcInvL2Norm({ 247.0f, 32.0f, 92.0f }), - 59.0f * CalcInvL2Norm({ 59.0f, 126.0f, 125.0f }), + 59.0f * CalcInvL2Norm({ 59.0f, 126.0f, 125.0f }), 189.0f * CalcInvL2Norm({ 189.0f, 21.0f, 88.0f }), // Batch 1, Channel 1, Height (4) x Width (3) 239.0f * CalcInvL2Norm({ 67.0f, 239.0f, 97.0f }), 104.0f * CalcInvL2Norm({ 90.0f, 104.0f, 145.0f }), 199.0f * CalcInvL2Norm({ 49.0f, 199.0f, 215.0f }), - 17.0f * CalcInvL2Norm({ 7.0f, 17.0f, 115.0f }), + 17.0f * CalcInvL2Norm({ 7.0f, 17.0f, 115.0f }), 124.0f * CalcInvL2Norm({ 163.0f, 124.0f, 116.0f }), 153.0f * CalcInvL2Norm({ 18.0f, 153.0f, 238.0f }), 222.0f * CalcInvL2Norm({ 25.0f, 222.0f, 226.0f }), 217.0f * CalcInvL2Norm({ 117.0f, 217.0f, 16.0f }), - 75.0f * CalcInvL2Norm({ 103.0f, 75.0f, 132.0f }), - 32.0f * CalcInvL2Norm({ 247.0f, 32.0f, 92.0f }), + 75.0f * CalcInvL2Norm({ 103.0f, 75.0f, 132.0f }), + 32.0f * CalcInvL2Norm({ 247.0f, 32.0f, 92.0f }), 126.0f * CalcInvL2Norm({ 59.0f, 126.0f, 125.0f }), - 21.0f * CalcInvL2Norm({ 189.0f, 21.0f, 88.0f }), + 21.0f * CalcInvL2Norm({ 189.0f, 21.0f, 88.0f }), // Batch 1, Channel 2, Height (4) x Width (3) - 97.0f * CalcInvL2Norm({ 67.0f, 239.0f, 97.0f }), + 97.0f * CalcInvL2Norm({ 67.0f, 239.0f, 97.0f }), 145.0f * CalcInvL2Norm({ 90.0f, 104.0f, 145.0f }), 215.0f * CalcInvL2Norm({ 49.0f, 199.0f, 215.0f }), 115.0f * CalcInvL2Norm({ 7.0f, 17.0f, 115.0f }), 116.0f * CalcInvL2Norm({ 163.0f, 124.0f, 116.0f }), 238.0f * CalcInvL2Norm({ 18.0f, 153.0f, 238.0f }), 226.0f * CalcInvL2Norm({ 25.0f, 222.0f, 226.0f }), - 16.0f * CalcInvL2Norm({ 117.0f, 217.0f, 16.0f 
}),
+    16.0f * CalcInvL2Norm({ 117.0f, 217.0f, 16.0f }),
     132.0f * CalcInvL2Norm({ 103.0f, 75.0f, 132.0f }),
-    92.0f * CalcInvL2Norm({ 247.0f, 32.0f, 92.0f }),
+    92.0f * CalcInvL2Norm({ 247.0f, 32.0f, 92.0f }),
     125.0f * CalcInvL2Norm({ 59.0f, 126.0f, 125.0f }),
-    88.0f * CalcInvL2Norm({ 189.0f, 21.0f, 88.0f })
+    88.0f * CalcInvL2Norm({ 189.0f, 21.0f, 88.0f })
     };
-    return L2NormalizationTestImpl(workloadFactory, memoryManager, inputOutputShape,
-                                   inputValues, expectedOutputValues, layout);
+    return L2NormalizationTestImpl<ArmnnType>(workloadFactory, memoryManager, qScale, qOffset, inputOutputShape,
+                                              inputValues, expectedOutputValues, layout);
+}
+
+LayerTestResult<float, 4> L2Normalization4dTest(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::DataLayout layout)
+{
+    return L2Normalization4dTestCommon<armnn::DataType::Float32>(workloadFactory, memoryManager, 0.f, 0, layout);
+}
+
+LayerTestResult<int16_t, 4> L2Normalization4dInt16Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::DataLayout layout)
+{
+    return L2Normalization4dTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 1.f, 0,
+                                                                         layout);
 }
 
 template>
diff --git a/src/backends/backendsCommon/test/LayerTests.hpp b/src/backends/backendsCommon/test/LayerTests.hpp
index 8a5a61145c..853a612b78 100644
--- a/src/backends/backendsCommon/test/LayerTests.hpp
+++ b/src/backends/backendsCommon/test/LayerTests.hpp
@@ -927,21 +927,41 @@ LayerTestResult<float, 4> L2Normalization1dTest(
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
     const armnn::DataLayout layout);
 
+LayerTestResult<int16_t, 4> L2Normalization1dInt16Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::DataLayout layout);
+
 LayerTestResult<float, 4> L2Normalization2dTest(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
     const armnn::DataLayout layout);
 
+LayerTestResult<int16_t, 4> L2Normalization2dInt16Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::DataLayout layout);
+
 LayerTestResult<float, 4> L2Normalization3dTest(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
     const armnn::DataLayout layout);
 
+LayerTestResult<int16_t, 4> L2Normalization3dInt16Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::DataLayout layout);
+
 LayerTestResult<float, 4> L2Normalization4dTest(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
     const armnn::DataLayout layout);
 
+LayerTestResult<int16_t, 4> L2Normalization4dInt16Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::DataLayout layout);
+
 LayerTestResult<float, 4> ConstantTest(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
--
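
Note (illustration only, not part of the patch): the refactor funnels every L2Normalization test through one templated implementation and converts the float reference data with a QuantizedVector-style helper, which is why the float wrappers pass qScale = 0.f, qOffset = 0 while the Int16 wrappers pass qScale = 1.f, qOffset = 0. The sketch below is a self-contained stand-in for that conversion step; unlike the real helper, which selects the conversion from the element type, this version dispatches on the scale so it can be compiled without ArmNN, and QuantizedVectorSketch is a hypothetical name.

// Standalone sketch of the reference-data conversion used by the tests above.
// QuantizedVectorSketch is a hypothetical stand-in for the QuantizedVector<T>
// helper in LayerTests.cpp: the real helper picks the conversion from the
// element type, whereas this one dispatches on the scale so it builds on its own.
#include <cassert>
#include <cmath>
#include <cstdint>
#include <vector>

template <typename T>
std::vector<T> QuantizedVectorSketch(float qScale, int32_t qOffset, const std::vector<float>& values)
{
    std::vector<T> result;
    result.reserve(values.size());
    for (float v : values)
    {
        if (qScale == 0.0f)
        {
            // Float path: reference values are used unchanged (qScale = 0.f, qOffset = 0).
            result.push_back(static_cast<T>(v));
        }
        else
        {
            // Quantised path: round(value / scale) + offset, matching the
            // qScale = 1.f, qOffset = 0 arguments of the new *Int16Test wrappers.
            result.push_back(static_cast<T>(std::lround(v / qScale) + qOffset));
        }
    }
    return result;
}

int main()
{
    // Roughly the L2-normalised form of { 1, 2, 3 }.
    const std::vector<float> reference{ 0.2673f, 0.5345f, 0.8018f };

    auto asFloat = QuantizedVectorSketch<float>(0.f, 0, reference);   // float test path
    auto asInt16 = QuantizedVectorSketch<int16_t>(1.f, 0, reference); // Int16 test path

    assert(asFloat.size() == reference.size());
    assert(asInt16.size() == reference.size());
    return 0;
}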