about summary refs log tree commit diff
path: root/src/backends/neon/workloads/NeonFullyConnectedWorkload.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'src/backends/neon/workloads/NeonFullyConnectedWorkload.cpp')
-rw-r--r-- src/backends/neon/workloads/NeonFullyConnectedWorkload.cpp | 9
1 file changed, 4 insertions(+), 5 deletions(-)
diff --git a/src/backends/neon/workloads/NeonFullyConnectedWorkload.cpp b/src/backends/neon/workloads/NeonFullyConnectedWorkload.cpp
index 8cebb4f48f..51fd7af362 100644
--- a/src/backends/neon/workloads/NeonFullyConnectedWorkload.cpp
+++ b/src/backends/neon/workloads/NeonFullyConnectedWorkload.cpp
@@ -69,22 +69,22 @@ NeonFullyConnectedWorkload::NeonFullyConnectedWorkload(const FullyConnectedQueue
// Allocate
if (m_Data.m_Weight->GetTensorInfo().GetDataType() == DataType::QuantisedAsymm8)
{
- InitialiseArmComputeTensorData(*m_WeightsTensor, m_Data.m_Weight->GetConstTensor<uint8_t>());
+ InitializeArmComputeTensorData(*m_WeightsTensor, m_Data.m_Weight);
}
else
{
- InitializeArmComputeTensorDataForFloatTypes(*m_WeightsTensor, m_Data.m_Weight);
+ InitializeArmComputeTensorData(*m_WeightsTensor, m_Data.m_Weight);
}
if (m_BiasesTensor)
{
if (m_Data.m_Bias->GetTensorInfo().GetDataType() == DataType::Signed32)
{
- InitialiseArmComputeTensorData(*m_BiasesTensor, m_Data.m_Bias->GetConstTensor<int32_t>());
+ InitializeArmComputeTensorData(*m_BiasesTensor, m_Data.m_Bias);
}
else
{
- InitializeArmComputeTensorDataForFloatTypes(*m_BiasesTensor, m_Data.m_Bias);
+ InitializeArmComputeTensorData(*m_BiasesTensor, m_Data.m_Bias);
}
}
@@ -107,4 +107,3 @@ void NeonFullyConnectedWorkload::FreeUnusedTensors()
}
} //namespace armnn
-