34 const std::vector<float>& inputValues,
35 const std::vector<float>& expectedOutputValues,
42 auto inputTensor = armnnUtils::QuantizedVector<T>(inputValues, qScale, qOffset);
45 std::vector<T> expectedOutput = armnnUtils::QuantizedVector<T>(expectedOutputValues, qScale, qOffset);
47 std::unique_ptr<armnn::ITensorHandle> inputHandle = tensorHandleFactory.
CreateTensorHandle(inputInfo);
48 std::unique_ptr<armnn::ITensorHandle> outputHandle = tensorHandleFactory.
CreateTensorHandle(outputInfo);
52 AddInputToWorkload(descriptor, info, inputInfo, inputHandle.get());
53 AddOutputToWorkload(descriptor, info, outputInfo, outputHandle.get());
55 std::unique_ptr<armnn::IWorkload> workload = workloadFactory.
CreateLogSoftmax(descriptor, info);
57 inputHandle->Allocate();
58 outputHandle->Allocate();
62 ExecuteWorkload(*workload, memoryManager);
68 outputHandle->GetShape(),
75 template<armnn::DataType ArmnnType,
typename T>
86 std::vector<float> inputValues
92 std::vector<float> expectedOutputValues
94 -4.14297f, -10.14297f, -2.14297f, -0.14297f,
95 -7.00104f, -12.00104f, -0.00105f, -9.00104f
102 return LogSoftmaxTestImpl<ArmnnType, 4>(
109 expectedOutputValues,
113 template<armnn::DataType ArmnnType,
typename T>
124 std::vector<float> inputValues
130 std::vector<float> expectedOutputValues
132 -4.14297f, -10.14297f, -2.14297f, -0.14297f,
133 -7.00104f, -12.00104f, -0.00105f, -9.00104f
140 return LogSoftmaxTestImpl<ArmnnType, 4>(
147 expectedOutputValues,
151 template<armnn::DataType ArmnnType,
typename T>
162 std::vector<float> inputValues
164 0.0f, -0.6f, 0.2f, 0.4f,
165 0.3f, -0.2f, 1.0f, 0.1f
168 std::vector<float> expectedOutputValues
170 -4.14297f, -10.14297f, -2.14297f, -0.14297f,
171 -7.00104f, -12.00104f, -0.00105f, -9.00104f
178 return LogSoftmaxTestImpl<ArmnnType, 4>(
185 expectedOutputValues,
189 template<armnn::DataType ArmnnType,
typename T>
200 std::vector<float> inputValues
206 std::vector<float> expectedOutputValues
208 -3.048587f, -4.018149f, -8.000336f, -0.048587f,
209 -0.048587f, -0.018149f, -0.000335f, -3.048587f
216 return LogSoftmaxTestImpl<ArmnnType, 4>(
223 expectedOutputValues,
228 LogSoftmaxTest1<armnn::DataType::Float32>(
233 template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 4>
234 LogSoftmaxTest2<armnn::DataType::Float32>(
239 template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 4>
240 LogSoftmaxTest3<armnn::DataType::Float32>(
245 template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 4>
246 LogSoftmaxTest4<armnn::DataType::Float32>(
252 LogSoftmaxTest1<armnn::DataType::Float16>(
257 template LayerTestResult<armnn::ResolveType<armnn::DataType::Float16>, 4>
258 LogSoftmaxTest2<armnn::DataType::Float16>(
263 template LayerTestResult<armnn::ResolveType<armnn::DataType::Float16>, 4>
264 LogSoftmaxTest3<armnn::DataType::Float16>(
269 template LayerTestResult<armnn::ResolveType<armnn::DataType::Float16>, 4>
270 LogSoftmaxTest4<armnn::DataType::Float16>(
int m_Axis
Scalar, defaulted to the last index (-1), specifying the dimension the activation will be performed on.
const TensorShape & GetShape() const
virtual std::unique_ptr< IWorkload > CreateLogSoftmax(const LogSoftmaxQueueDescriptor &descriptor, const WorkloadInfo &info) const
float m_Beta
Exponentiation value.
LayerTestResult< T, 4 > LogSoftmaxTest2(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager, const armnn::ITensorHandleFactory &tensorHandleFactory)
typename ResolveTypeImpl< DT >::Type ResolveType
void IgnoreUnused(Ts &&...)
LayerDescriptor m_Parameters
LayerTestResult< T, 4 > LogSoftmaxTest1(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager, const armnn::ITensorHandleFactory &tensorHandleFactory)
std::shared_ptr< IMemoryManager > IMemoryManagerSharedPtr
void CopyDataFromITensorHandle(void *memory, const armnn::ITensorHandle *tensorHandle)
LayerTestResult< T, 4 > LogSoftmaxTest3(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager, const armnn::ITensorHandleFactory &tensorHandleFactory)
Contains information about TensorInfos of a layer.
LayerTestResult< T, 4 > LogSoftmaxTest4(armnn::IWorkloadFactory &workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager, const armnn::ITensorHandleFactory &tensorHandleFactory)
virtual std::unique_ptr< ITensorHandle > CreateTensorHandle(const TensorInfo &tensorInfo) const =0
unsigned int GetNumElements() const
void CopyDataToITensorHandle(armnn::ITensorHandle *tensorHandle, const void *memory)