//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "SoftmaxTestImpl.hpp"

#include <QuantizeHelper.hpp>
#include <ResolveType.hpp>

#include <armnn/utility/IgnoreUnused.hpp>

#include <backendsCommon/test/TensorCopyUtils.hpp>
#include <backendsCommon/test/WorkloadTestUtils.hpp>

#include <test/TensorHelpers.hpp>

#include <algorithm>

namespace
{

struct Simple3dSoftmaxOutputData
{
    const std::vector<float> outputData =
    {
        0.0964599f, 0.26220518f, 0.0964599f, 0.0964599f,
        0.15903549f, 0.0964599f, 0.0964599f, 0.0964599f
    };

    const armnn::TensorShape inputShape{ 1, 8, 1 };

    const std::vector<float> inputData =
    {
        0.0f, 1.0f, 0.0f, 0.0f,
        0.5f, 0.0f, 0.0f, 0.0f,
    };
};

struct Simple4dSoftmaxData
{
    const armnn::TensorShape inputShape{ 1, 8, 1, 1 };

    const std::vector<float> outputData =
    {
        0.0964599f, 0.26220518f, 0.0964599f, 0.0964599f,
        0.15903549f, 0.0964599f, 0.0964599f, 0.0964599f
    };

    const std::vector<float> inputData =
    {
        0.0f, 1.0f, 0.0f, 0.0f,
        0.5f, 0.0f, 0.0f, 0.0f
    };
};

template<armnn::DataType ArmnnType, std::size_t n, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, n> SimpleSoftmaxBaseTestImpl(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta,
    const armnn::TensorShape& inputShape,
    const std::vector<float>& outputData,
    const std::vector<float>& inputData,
    int axis = 1)
{
    IgnoreUnused(memoryManager);
    using std::exp;

    const float qScale = 1.f / 256.f;
    const int qOffset = 0;

    armnn::TensorInfo inputTensorInfo;
    armnn::TensorInfo outputTensorInfo;

    inputTensorInfo = armnn::TensorInfo(inputShape, ArmnnType);
    inputTensorInfo.SetQuantizationScale(qScale);
    inputTensorInfo.SetQuantizationOffset(qOffset);

    outputTensorInfo = armnn::TensorInfo(inputShape, ArmnnType);
    outputTensorInfo.SetQuantizationScale(qScale);
    outputTensorInfo.SetQuantizationOffset(qOffset);

    LayerTestResult<T, n> ret(outputTensorInfo);

    // Each row is independently softmax'd.
    auto input = MakeTensor<T, n>(inputTensorInfo, armnnUtils::QuantizedVector<T>(inputData, qScale, qOffset));

    std::unique_ptr<armnn::ITensorHandle> inputHandle  = workloadFactory.CreateTensorHandle(inputTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);

    armnn::SoftmaxQueueDescriptor data;
    data.m_Parameters.m_Beta = beta;
    data.m_Parameters.m_Axis = axis;

    armnn::WorkloadInfo info;
    AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());
    AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());

    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateSoftmax(data, info);

    inputHandle->Allocate();
    outputHandle->Allocate();
    CopyDataToITensorHandle(inputHandle.get(), input.origin());

    ARMNN_ASSERT(workload);

    ExecuteWorkload(*workload, memoryManager);

    CopyDataFromITensorHandle(ret.output.origin(), outputHandle.get());

    std::vector<T> expectedOutput = armnnUtils::QuantizedVector<T>(outputData, qScale, qOffset);
    ret.outputExpected = MakeTensor<T, n>(outputTensorInfo, expectedOutput);

    return ret;
}

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 2> SimpleSoftmaxTestImpl(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    using std::exp;
    const armnn::TensorShape inputShape{ 2, 4 };

    float x0[4] = { exp((0.f - 1.0f) * beta), exp((1.0f - 1.0f) * beta),
                    exp((0.0f - 1.0f) * beta), exp((0.0f - 1.0f) * beta) };
    float sum0 = x0[0] + x0[1] + x0[2] + x0[3];

    float x1[4] = { exp((0.5f - 0.5f) * beta), exp((0.0f - 0.5f) * beta),
                    exp((0.0f - 0.5f) * beta), exp((0.0f - 0.5f) * beta) };
    float sum1 = x1[0] + x1[1] + x1[2] + x1[3];

    const std::vector<float> outputData = { x0[0] / sum0, x0[1] / sum0, x0[2] / sum0, x0[3] / sum0,
                                            x1[0] / sum1, x1[1] / sum1, x1[2] / sum1, x1[3] / sum1 };

    const std::vector<float> inputData =
    {
        0.f, 1.f, 0.f, 0.f,
        .5f, 0.f, 0.f, 0.f,
    };

    return SimpleSoftmaxBaseTestImpl<ArmnnType, 2>(workloadFactory, memoryManager, beta,
                                                   inputShape, outputData, inputData);
}
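// Note on the hard-coded expected values in Simple3dSoftmaxOutputData / Simple4dSoftmaxData above:
// they are the softmax (beta = 1) of the length-8 input { 0, 1, 0, 0, 0.5, 0, 0, 0 } taken along
// the size-8 dimension:
//     sum = 6 * exp(0) + exp(1) + exp(0.5) ~= 6 + 2.71828 + 1.64872 ~= 10.367
//     exp(1)   / sum ~= 0.26220518
//     exp(0.5) / sum ~= 0.15903549
//     exp(0)   / sum ~= 0.0964599
// A minimal sketch of the per-slice computation those constants encode is shown below. It is
// illustrative only and not part of the Arm NN test utilities; backends under test are expected
// to match it within tolerance.
//
//     std::vector<float> ReferenceSoftmax(const std::vector<float>& x, float beta)
//     {
//         const float maxVal = *std::max_element(x.begin(), x.end());
//         std::vector<float> e(x.size());
//         float sum = 0.0f;
//         for (std::size_t i = 0; i < x.size(); ++i)
//         {
//             e[i] = std::exp((x[i] - maxVal) * beta);
//             sum += e[i];
//         }
//         for (float& v : e)
//         {
//             v /= sum;
//         }
//         return e;
//     }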
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 2> SimpleSoftmaxTestImpl(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta,
    int axis)
{
    armnn::TensorShape inputShape;
    std::vector<float> inputData;
    std::vector<float> outputData;

    switch (axis)
    {
    case -2:
    case 0:
        {
            inputShape = {5, 2};

            inputData =
            {
                17.0f, -1.0f, 16.0f, -2.0f, 15.0f, -3.0f, 14.0f, -4.0f, 1.0f, -17.0f
            };

            outputData =
            {
                0.643914213228014f, 0.643914213228014f, 0.236882800924671f, 0.236882800924671f,
                0.087144312427294f, 0.087144312427294f, 0.032058600957022f, 0.032058600957022f,
                7.246299848982885e-08f, 7.246299848982885e-08f
            };
            break;
        }
    case -1:
    case 1:
        {
            inputShape = {2, 5};

            inputData =
            {
                17.0f, 16.0f, 15.0f, 14.0f, 1.0f, -1.0f, -2.0f, -3.0f, -4.0f, -17.0f
            };

            outputData =
            {
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f, 0.643914213228014f, 0.236882800924671f, 0.087144312427294f,
                0.032058600957022f, 7.246299848982885e-08f
            };
            break;
        }
    }
    return SimpleSoftmaxBaseTestImpl<ArmnnType, 2>(workloadFactory, memoryManager, beta,
                                                   inputShape, outputData, inputData, axis);
}

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 3> Simple3dSoftmaxTestImpl(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta,
    const armnn::TensorShape& inputShape,
    const std::vector<float>& outputData,
    const std::vector<float>& inputData,
    int axis = 1)
{
    return SimpleSoftmaxBaseTestImpl<ArmnnType, 3>(workloadFactory, memoryManager, beta,
                                                   inputShape, outputData, inputData, axis);
}

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 4> Simple4dSoftmaxTestImpl(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta,
    const armnn::TensorShape& inputShape,
    const std::vector<float>& outputData,
    const std::vector<float>& inputData,
    int axis = 1)
{
    return SimpleSoftmaxBaseTestImpl<ArmnnType, 4>(workloadFactory, memoryManager, beta,
                                                   inputShape, outputData, inputData, axis);
}
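// CompareSoftmaxTestImpl below generates a random 20x30 input, runs the same softmax workload
// through the factory under test and through a reference factory, and returns both outputs in
// one LayerTestResult so the caller can compare them element-wise. A typical call site in a
// backend's unit tests looks roughly like the following (names of the factories and of the
// comparison helper are illustrative of the usual pattern, not prescribed by this file):
//
//     armnn::RefWorkloadFactory refWorkloadFactory;
//     auto result = CompareSoftmaxTest(workloadFactory, memoryManager, refWorkloadFactory, 1.0f);
//     BOOST_TEST(CompareTensors(result.output, result.outputExpected));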
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 2> CompareSoftmaxTestImpl(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    armnn::IWorkloadFactory& refWorkloadFactory,
    float beta)
{
    const int batchSize = 20;
    const int channels = 30;

    armnn::TensorInfo inputTensorInfo;
    armnn::TensorInfo outputTensorInfo;

    unsigned int inputShape[] = { batchSize, channels };

    inputTensorInfo = armnn::TensorInfo(2, inputShape, ArmnnType);
    outputTensorInfo = armnn::TensorInfo(2, inputShape, ArmnnType);
    float qScale = 1.f / 256.f;
    int qOffset = 0;
    inputTensorInfo.SetQuantizationScale(qScale);
    inputTensorInfo.SetQuantizationOffset(qOffset);
    outputTensorInfo.SetQuantizationScale(qScale);
    outputTensorInfo.SetQuantizationOffset(qOffset);

    LayerTestResult<T, 2> ret(outputTensorInfo);
    auto input = MakeRandomTensor<T, 2>(inputTensorInfo, 0xF00D, 0.0f, 1.0f);

    std::unique_ptr<armnn::ITensorHandle> inputHandle  = workloadFactory.CreateTensorHandle(inputTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);

    armnn::SoftmaxQueueDescriptor data;
    data.m_Parameters.m_Beta = beta;

    armnn::WorkloadInfo info;
    AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());
    AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());

    std::unique_ptr<armnn::ITensorHandle> outputHandleRef = refWorkloadFactory.CreateTensorHandle(outputTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> inputHandleRef  = refWorkloadFactory.CreateTensorHandle(inputTensorInfo);

    armnn::SoftmaxQueueDescriptor refData = data;
    armnn::WorkloadInfo refInfo = info;
    SetWorkloadInput(refData, refInfo, 0, inputTensorInfo, inputHandleRef.get());
    SetWorkloadOutput(refData, refInfo, 0, outputTensorInfo, outputHandleRef.get());

    std::unique_ptr<armnn::IWorkload> workload    = workloadFactory.CreateSoftmax(data, info);
    std::unique_ptr<armnn::IWorkload> workloadRef = refWorkloadFactory.CreateSoftmax(refData, refInfo);

    outputHandleRef->Allocate();
    inputHandleRef->Allocate();

    inputHandle->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle.get(), &input[0][0]);
    CopyDataToITensorHandle(inputHandleRef.get(), &input[0][0]);

    ExecuteWorkload(*workload, memoryManager);

    workloadRef->Execute();

    CopyDataFromITensorHandle(&ret.output[0][0], outputHandle.get());
    CopyDataFromITensorHandle(&ret.outputExpected[0][0], outputHandleRef.get());

    return ret;
}

} // anonymous namespace

LayerTestResult<float, 2> SimpleSoftmaxTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    return SimpleSoftmaxTestImpl<armnn::DataType::Float32>(workloadFactory, memoryManager, beta);
}

LayerTestResult<float, 2> SimpleAxisSoftmaxTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta,
    int axis)
{
    return SimpleSoftmaxTestImpl<armnn::DataType::Float32>(workloadFactory, memoryManager, beta, axis);
}

LayerTestResult<float, 3> Simple3dSoftmaxTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    Simple3dSoftmaxOutputData data;
    return Simple3dSoftmaxTestImpl<armnn::DataType::Float32>(workloadFactory, memoryManager, beta,
                                                             data.inputShape, data.outputData, data.inputData);
}
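// The axis tests below all reuse the same five expected values. Every softmax slice in the test
// data is either { 17, 16, 15, 14, 1 } or { -1, -2, -3, -4, -17 }; the second is the first shifted
// by -18, and softmax(x + c) == softmax(x) for any constant c, so both slices map to
// { 0.643914..., 0.236882..., 0.087144..., 0.032058..., 7.2463e-08 }.
// Negative axis values follow the usual count-from-the-back convention: for a 3D tensor,
// axis -3 is equivalent to 0, -2 to 1 and -1 to 2, which is why each switch handles the two
// spellings with a shared case body.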
LayerTestResult<float, 3> Simple3dAxisSoftmaxTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta,
    int axis)
{
    armnn::TensorShape inputShape;
    std::vector<float> inputData;
    std::vector<float> outputData;

    switch (axis)
    {
    case -3:
    case 0:
        {
            inputShape = {5, 2, 2};

            inputData =
            {
                17.0f, -1.0f, 17.0f, -1.0f, 16.0f, -2.0f, 16.0f, -2.0f, 15.0f, -3.0f,
                15.0f, -3.0f, 14.0f, -4.0f, 14.0f, -4.0f, 1.0f, -17.0f, 1.0f, -17.0f
            };

            outputData =
            {
                0.643914213228014f, 0.643914213228014f, 0.643914213228014f, 0.643914213228014f,
                0.236882800924671f, 0.236882800924671f, 0.236882800924671f, 0.236882800924671f,
                0.087144312427294f, 0.087144312427294f, 0.087144312427294f, 0.087144312427294f,
                0.032058600957022f, 0.032058600957022f, 0.032058600957022f, 0.032058600957022f,
                7.246299848982885e-08f, 7.246299848982885e-08f, 7.246299848982885e-08f, 7.246299848982885e-08f
            };
            break;
        }
    case -2:
    case 1:
        {
            inputShape = {2, 5, 2};

            inputData =
            {
                17.0f, -1.0f, 16.0f, -2.0f, 15.0f, -3.0f, 14.0f, -4.0f, 1.0f, -17.0f,
                17.0f, -1.0f, 16.0f, -2.0f, 15.0f, -3.0f, 14.0f, -4.0f, 1.0f, -17.0f
            };

            outputData =
            {
                0.643914213228014f, 0.643914213228014f, 0.236882800924671f, 0.236882800924671f,
                0.087144312427294f, 0.087144312427294f, 0.032058600957022f, 0.032058600957022f,
                7.246299848982885e-08f, 7.246299848982885e-08f,

                0.643914213228014f, 0.643914213228014f, 0.236882800924671f, 0.236882800924671f,
                0.087144312427294f, 0.087144312427294f, 0.032058600957022f, 0.032058600957022f,
                7.246299848982885e-08f, 7.246299848982885e-08f
            };
            break;
        }
    case -1:
    case 2:
        {
            inputShape = {2, 2, 5};

            inputData =
            {
                17.0f, 16.0f, 15.0f, 14.0f, 1.0f, -1.0f, -2.0f, -3.0f, -4.0f, -17.0f,
                17.0f, 16.0f, 15.0f, 14.0f, 1.0f, -1.0f, -2.0f, -3.0f, -4.0f, -17.0f
            };

            outputData =
            {
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f,
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f,
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f,
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f
            };
            break;
        }
    }
    return Simple3dSoftmaxTestImpl<armnn::DataType::Float32>(workloadFactory, memoryManager, beta,
                                                             inputShape, outputData, inputData, axis);
}

LayerTestResult<float, 4> Simple4dSoftmaxTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    Simple4dSoftmaxData data;
    return Simple4dSoftmaxTestImpl<armnn::DataType::Float32>(workloadFactory, memoryManager, beta,
                                                             data.inputShape, data.outputData, data.inputData);
}

LayerTestResult<float, 4> Simple4dAxisSoftmaxTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta,
    int axis)
{
    armnn::TensorShape inputShape;
    std::vector<float> inputData;
    std::vector<float> outputData;

    switch (axis)
    {
    case -4:
    case 0:
        {
            inputShape = {5, 2, 2, 2};

            inputData =
            {
                17.0f, -1.0f, 17.0f, -1.0f, 17.0f, -1.0f, 17.0f, -1.0f,
                16.0f, -2.0f, 16.0f, -2.0f, 16.0f, -2.0f, 16.0f, -2.0f,
                15.0f, -3.0f, 15.0f, -3.0f, 15.0f, -3.0f, 15.0f, -3.0f,
                14.0f, -4.0f, 14.0f, -4.0f, 14.0f, -4.0f, 14.0f, -4.0f,
                1.0f, -17.0f, 1.0f, -17.0f, 1.0f, -17.0f, 1.0f, -17.0f
            };

            outputData =
            {
                0.643914213228014f, 0.643914213228014f, 0.643914213228014f, 0.643914213228014f,
                0.643914213228014f, 0.643914213228014f, 0.643914213228014f, 0.643914213228014f,
                0.236882800924671f, 0.236882800924671f, 0.236882800924671f, 0.236882800924671f,
                0.236882800924671f, 0.236882800924671f, 0.236882800924671f, 0.236882800924671f,
                0.087144312427294f, 0.087144312427294f, 0.087144312427294f, 0.087144312427294f,
                0.087144312427294f, 0.087144312427294f, 0.087144312427294f, 0.087144312427294f,
                0.032058600957022f, 0.032058600957022f, 0.032058600957022f, 0.032058600957022f,
                0.032058600957022f, 0.032058600957022f, 0.032058600957022f, 0.032058600957022f,
                7.246299848982885e-08f, 7.246299848982885e-08f, 7.246299848982885e-08f, 7.246299848982885e-08f,
                7.246299848982885e-08f, 7.246299848982885e-08f, 7.246299848982885e-08f, 7.246299848982885e-08f
            };
            break;
        }
    case -3:
    case 1:
        {
            inputShape = {2, 5, 2, 2};

            inputData =
            {
                17.0f, -1.0f, 17.0f, -1.0f, 16.0f, -2.0f, 16.0f, -2.0f, 15.0f, -3.0f,
                15.0f, -3.0f, 14.0f, -4.0f, 14.0f, -4.0f, 1.0f, -17.0f, 1.0f, -17.0f,
                17.0f, -1.0f, 17.0f, -1.0f, 16.0f, -2.0f, 16.0f, -2.0f, 15.0f, -3.0f,
                15.0f, -3.0f, 14.0f, -4.0f, 14.0f, -4.0f, 1.0f, -17.0f, 1.0f, -17.0f
            };

            outputData =
            {
                0.643914213228014f, 0.643914213228014f, 0.643914213228014f, 0.643914213228014f,
                0.236882800924671f, 0.236882800924671f, 0.236882800924671f, 0.236882800924671f,
                0.087144312427294f, 0.087144312427294f, 0.087144312427294f, 0.087144312427294f,
                0.032058600957022f, 0.032058600957022f, 0.032058600957022f, 0.032058600957022f,
                7.246299848982885e-08f, 7.246299848982885e-08f, 7.246299848982885e-08f, 7.246299848982885e-08f,

                0.643914213228014f, 0.643914213228014f, 0.643914213228014f, 0.643914213228014f,
                0.236882800924671f, 0.236882800924671f, 0.236882800924671f, 0.236882800924671f,
                0.087144312427294f, 0.087144312427294f, 0.087144312427294f, 0.087144312427294f,
                0.032058600957022f, 0.032058600957022f, 0.032058600957022f, 0.032058600957022f,
                7.246299848982885e-08f, 7.246299848982885e-08f, 7.246299848982885e-08f, 7.246299848982885e-08f
            };
            break;
        }
    case -2:
    case 2:
        {
            inputShape = {2, 2, 5, 2};

            inputData =
            {
                17.0f, -1.0f, 16.0f, -2.0f, 15.0f, -3.0f, 14.0f, -4.0f, 1.0f, -17.0f,
                17.0f, -1.0f, 16.0f, -2.0f, 15.0f, -3.0f, 14.0f, -4.0f, 1.0f, -17.0f,
                17.0f, -1.0f, 16.0f, -2.0f, 15.0f, -3.0f, 14.0f, -4.0f, 1.0f, -17.0f,
                17.0f, -1.0f, 16.0f, -2.0f, 15.0f, -3.0f, 14.0f, -4.0f, 1.0f, -17.0f
            };

            outputData =
            {
                0.643914213228014f, 0.643914213228014f, 0.236882800924671f, 0.236882800924671f,
                0.087144312427294f, 0.087144312427294f, 0.032058600957022f, 0.032058600957022f,
                7.246299848982885e-08f, 7.246299848982885e-08f,

                0.643914213228014f, 0.643914213228014f, 0.236882800924671f, 0.236882800924671f,
                0.087144312427294f, 0.087144312427294f, 0.032058600957022f, 0.032058600957022f,
                7.246299848982885e-08f, 7.246299848982885e-08f,

                0.643914213228014f, 0.643914213228014f, 0.236882800924671f, 0.236882800924671f,
                0.087144312427294f, 0.087144312427294f, 0.032058600957022f, 0.032058600957022f,
                7.246299848982885e-08f, 7.246299848982885e-08f,

                0.643914213228014f, 0.643914213228014f, 0.236882800924671f, 0.236882800924671f,
                0.087144312427294f, 0.087144312427294f, 0.032058600957022f, 0.032058600957022f,
                7.246299848982885e-08f, 7.246299848982885e-08f
            };
            break;
        }
    case -1:
    case 3:
        {
            inputShape = {2, 2, 2, 5};

            inputData =
            {
                17.0f, 16.0f, 15.0f, 14.0f, 1.0f, -1.0f, -2.0f, -3.0f, -4.0f, -17.0f,
                17.0f, 16.0f, 15.0f, 14.0f, 1.0f, -1.0f, -2.0f, -3.0f, -4.0f, -17.0f,
                17.0f, 16.0f, 15.0f, 14.0f, 1.0f, -1.0f, -2.0f, -3.0f, -4.0f, -17.0f,
                17.0f, 16.0f, 15.0f, 14.0f, 1.0f, -1.0f, -2.0f, -3.0f, -4.0f, -17.0f
            };

            outputData =
            {
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f,
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f,
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f,
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f,
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f,
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f,
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f,
                0.643914213228014f, 0.236882800924671f, 0.087144312427294f, 0.032058600957022f,
                7.246299848982885e-08f
            };
            break;
        }
    }
    return Simple4dSoftmaxTestImpl<armnn::DataType::Float32>(
        workloadFactory, memoryManager, beta, inputShape, outputData, inputData, axis);
}

LayerTestResult<uint8_t, 2> SimpleSoftmaxUint8Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    return SimpleSoftmaxTestImpl<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, beta);
}

LayerTestResult<uint8_t, 3> Simple3dSoftmaxUint8Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    Simple3dSoftmaxOutputData data;
    return Simple3dSoftmaxTestImpl<armnn::DataType::QAsymmU8>(
        workloadFactory, memoryManager, beta, data.inputShape, data.outputData, data.inputData);
}

LayerTestResult<uint8_t, 4> Simple4dSoftmaxUint8Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    Simple4dSoftmaxData data;
    return Simple4dSoftmaxTestImpl<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, beta,
                                                              data.inputShape, data.outputData, data.inputData);
}

LayerTestResult<armnn::Half, 2> SimpleSoftmaxFloat16Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    return SimpleSoftmaxTestImpl<armnn::DataType::Float16>(workloadFactory, memoryManager, beta);
}

LayerTestResult<armnn::Half, 3> Simple3dSoftmaxFloat16Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    Simple3dSoftmaxOutputData data;
    return Simple3dSoftmaxTestImpl<armnn::DataType::Float16>(workloadFactory, memoryManager, beta,
                                                             data.inputShape, data.outputData, data.inputData);
}
LayerTestResult<armnn::Half, 4> Simple4dSoftmaxFloat16Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    Simple4dSoftmaxData data;
    return Simple4dSoftmaxTestImpl<armnn::DataType::Float16>(workloadFactory, memoryManager, beta,
                                                             data.inputShape, data.outputData, data.inputData);
}

LayerTestResult<int16_t, 2> SimpleSoftmaxUint16Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    return SimpleSoftmaxTestImpl<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, beta);
}

LayerTestResult<int16_t, 3> Simple3dSoftmaxUint16Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    Simple3dSoftmaxOutputData data;
    return Simple3dSoftmaxTestImpl<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, beta,
                                                              data.inputShape, data.outputData, data.inputData);
}

LayerTestResult<int16_t, 4> Simple4dSoftmaxUint16Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    float beta)
{
    Simple4dSoftmaxData data;
    return Simple4dSoftmaxTestImpl<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, beta,
                                                              data.inputShape, data.outputData, data.inputData);
}

LayerTestResult<float, 2> CompareSoftmaxTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    armnn::IWorkloadFactory& refWorkloadFactory,
    float beta)
{
    return CompareSoftmaxTestImpl<armnn::DataType::Float32>(
        workloadFactory, memoryManager, refWorkloadFactory, beta);
}

LayerTestResult<uint8_t, 2> CompareSoftmaxUint8Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    armnn::IWorkloadFactory& refWorkloadFactory,
    float beta)
{
    return CompareSoftmaxTestImpl<armnn::DataType::QAsymmU8>(
        workloadFactory, memoryManager, refWorkloadFactory, beta);
}
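// These entry points are registered from the per-backend unit test files via the backend test
// macros; the sketch below shows the usual pattern, with the test-case names chosen here for
// illustration only:
//
//     ARMNN_AUTO_TEST_CASE(SimpleSoftmaxBeta1,      SimpleSoftmaxTest,      1.0f)
//     ARMNN_AUTO_TEST_CASE(SimpleSoftmaxBeta2,      SimpleSoftmaxTest,      2.0f)
//     ARMNN_AUTO_TEST_CASE(SimpleSoftmaxBeta1Uint8, SimpleSoftmaxUint8Test, 1.0f)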