Diffstat (limited to 'src/armnn/backends/ArmComputeUtils.hpp')
 src/armnn/backends/ArmComputeUtils.hpp | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/src/armnn/backends/ArmComputeUtils.hpp b/src/armnn/backends/ArmComputeUtils.hpp
index c451e6434b..3c57fb59b7 100644
--- a/src/armnn/backends/ArmComputeUtils.hpp
+++ b/src/armnn/backends/ArmComputeUtils.hpp
@@ -36,7 +36,7 @@ CreateAclNormalizationLayerInfoForL2Normalization(const armnn::TensorInfo& tenso
     // For the reference implementation, to make alpha_ become 1, we'd have to use alpha = normSize instead.
     const float alpha = 1.0f;
 
-    // Don't offset the reduction
+    // Don't offset the reduction.
     const float kappa = 0.0f;
 
     // pow(reduction, -0.5) = 1 / sqrt(reduction)
@@ -53,7 +53,7 @@ ConvertActivationFunctionToAclActivationFunction(ActivationFunction armnnFunctio
     switch (armnnFunction)
     {
         case ActivationFunction::Linear: return AclActivationFunction::LINEAR;
-        // Arm compute's 'logistic' function is non-parameterized, so it is exactly a sigmoid function
+        // Arm compute's 'logistic' function is non-parameterized, so it is exactly a sigmoid function.
         case ActivationFunction::Sigmoid: return AclActivationFunction::LOGISTIC;
         case ActivationFunction::ReLu: return AclActivationFunction::RELU;
         case ActivationFunction::BoundedReLu: return AclActivationFunction::LU_BOUNDED_RELU;
@@ -112,6 +112,14 @@ ConvertNormalizationAlgorithmChannelToAclNormType(NormalizationAlgorithmChannel
     }
 }
 
+inline arm_compute::FullyConnectedLayerInfo
+ConvertFullyConnectedDescriptorToAclFullyConnectedLayerInfo(const FullyConnectedDescriptor& fullyConnectedDesc)
+{
+    arm_compute::FullyConnectedLayerInfo fc_info;
+    fc_info.transpose_weights = fullyConnectedDesc.m_TransposeWeightMatrix;
+    return fc_info;
+}
+
 }
 
 #endif // ARMCOMPUTENEON_ENABLED || ARMCOMPUTECL_ENABLED
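
Usage note (illustrative, not part of the change): a minimal sketch of how a backend workload might consume the new helper. The include paths, the wrapper function, and the final configure() call mentioned in the comments are assumptions; only ConvertFullyConnectedDescriptorToAclFullyConnectedLayerInfo and m_TransposeWeightMatrix come from the diff above.

// Hypothetical usage sketch for the helper added in this diff.
#include "backends/ArmComputeUtils.hpp" // assumed include path
#include "armnn/Descriptors.hpp"

void ConfigureFullyConnectedExample()
{
    armnn::FullyConnectedDescriptor descriptor;
    descriptor.m_TransposeWeightMatrix = true; // weights are stored transposed

    // The helper copies the ArmNN flag onto ACL's layer info struct.
    arm_compute::FullyConnectedLayerInfo fcInfo =
        armnn::ConvertFullyConnectedDescriptorToAclFullyConnectedLayerInfo(descriptor);

    // fcInfo.transpose_weights now mirrors m_TransposeWeightMatrix; a workload
    // would hand it to the ACL function, e.g.
    // arm_compute::NEFullyConnectedLayer::configure(input, weights, biases,
    // output, fcInfo), whose exact signature depends on the ACL version.
}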