aboutsummaryrefslogtreecommitdiff
path: root/src/backends/reference
diff options
context:
space:
mode:
Diffstat (limited to 'src/backends/reference')
-rw-r--r--src/backends/reference/RefLayerSupport.cpp82
-rw-r--r--src/backends/reference/test/RefCreateWorkloadTests.cpp5
-rw-r--r--src/backends/reference/test/RefLayerTests.cpp8
3 files changed, 81 insertions, 14 deletions
diff --git a/src/backends/reference/RefLayerSupport.cpp b/src/backends/reference/RefLayerSupport.cpp
index b6da628be3..9be1ed6d74 100644
--- a/src/backends/reference/RefLayerSupport.cpp
+++ b/src/backends/reference/RefLayerSupport.cpp
@@ -12,6 +12,7 @@
#include <armnn/Descriptors.hpp>
#include <backendsCommon/BackendRegistry.hpp>
+#include <backendsCommon/test/WorkloadTestUtils.hpp>
#include <boost/core/ignore_unused.hpp>
@@ -108,9 +109,29 @@ struct TypeAnyOf : public Rule
TypeAnyOf(const TensorInfo& info, const Container& c)
{
m_Res = std::any_of(c.begin(), c.end(), [&info](DataType dt)
- {
- return dt == info.GetDataType();
- });
+ {
+ return dt == info.GetDataType();
+ });
+ }
+};
+
+struct BiasAndWeightsTypesMatch : public Rule
+{
+ BiasAndWeightsTypesMatch(const TensorInfo& biases, const TensorInfo& weights)
+ {
+ m_Res = biases.GetDataType() == GetBiasTypeFromWeightsType(weights.GetDataType()).value();
+ }
+};
+
+struct BiasAndWeightsTypesCompatible : public Rule
+{
+ template<typename Container>
+ BiasAndWeightsTypesCompatible(const TensorInfo& info, const Container& c)
+ {
+ m_Res = std::any_of(c.begin(), c.end(), [&info](DataType dt)
+ {
+ return dt == GetBiasTypeFromWeightsType(info.GetDataType()).value();
+ });
}
};
@@ -569,14 +590,53 @@ bool RefLayerSupport::IsFullyConnectedSupported(const TensorInfo& input,
const FullyConnectedDescriptor& descriptor,
Optional<std::string&> reasonIfUnsupported) const
{
- ignore_unused(output);
- ignore_unused(weights);
- ignore_unused(biases);
- ignore_unused(descriptor);
- return IsSupportedForDataTypeRef(reasonIfUnsupported,
- input.GetDataType(),
- &TrueFunc<>,
- &TrueFunc<>);
+ bool supported = true;
+
+ // Define supported types.
+ std::array<DataType,3> supportedTypes =
+ {
+ DataType::Float32,
+ DataType::QuantisedAsymm8,
+ DataType::QuantisedSymm16
+ };
+
+ supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
+ "Reference Fully Connected: input type not supported.");
+
+ supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
+ "Reference Fully Connected: output type not supported.");
+
+ supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
+ "Reference Fully Connected: input and output types mismatched.");
+
+ supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
+ "Reference Fully Connected: weights type not supported.");
+
+ supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
+ "Reference Fully Connected: input and weight types mismatched.");
+
+ if (descriptor.m_BiasEnabled)
+ {
+        // Define supported types for bias
+ std::array<DataType, 2>
+ supportedBiasTypes =
+ {
+ DataType::Float32,
+ DataType::Signed32
+ };
+
+ supported &= CheckSupportRule(TypeAnyOf(biases, supportedBiasTypes), reasonIfUnsupported,
+ "Reference Fully Connected: bias type not supported.");
+
+ supported &= CheckSupportRule(BiasAndWeightsTypesMatch(biases, weights), reasonIfUnsupported,
+ "Reference Fully Connected: bias and weight types mismatch.");
+
+ supported &= CheckSupportRule(BiasAndWeightsTypesCompatible(weights, supportedBiasTypes), reasonIfUnsupported,
+ "Reference Fully Connected: bias type inferred from weights is incompatible.");
+
+ }
+
+ return supported;
}
bool RefLayerSupport::IsGatherSupported(const armnn::TensorInfo& input0,
diff --git a/src/backends/reference/test/RefCreateWorkloadTests.cpp b/src/backends/reference/test/RefCreateWorkloadTests.cpp
index 48b85cb9de..95da7abad1 100644
--- a/src/backends/reference/test/RefCreateWorkloadTests.cpp
+++ b/src/backends/reference/test/RefCreateWorkloadTests.cpp
@@ -327,6 +327,11 @@ BOOST_AUTO_TEST_CASE(CreateFullyConnectedWorkloadQuantisedAsymm8)
RefCreateFullyConnectedWorkloadTest<RefFullyConnectedWorkload, armnn::DataType::QuantisedAsymm8>();
}
+BOOST_AUTO_TEST_CASE(CreateFullyConnectedWorkloadQuantisedSymm16)
+{
+ RefCreateFullyConnectedWorkloadTest<RefFullyConnectedWorkload, armnn::DataType::QuantisedSymm16>();
+}
+
template <typename NormalizationWorkloadType, armnn::DataType DataType>
static void RefCreateNormalizationWorkloadTest(DataLayout dataLayout)
{
diff --git a/src/backends/reference/test/RefLayerTests.cpp b/src/backends/reference/test/RefLayerTests.cpp
index ed8f02f46d..1207c1d648 100644
--- a/src/backends/reference/test/RefLayerTests.cpp
+++ b/src/backends/reference/test/RefLayerTests.cpp
@@ -234,11 +234,13 @@ ARMNN_AUTO_TEST_CASE(SquareInt16, SquareInt16Test)
ARMNN_AUTO_TEST_CASE(TanhInt16, TanhInt16Test)
-// Fully Conected
+// Fully Connected
ARMNN_AUTO_TEST_CASE(SimpleFullyConnected, FullyConnectedFloat32Test, false, false)
-ARMNN_AUTO_TEST_CASE(FullyConnectedUint8, FullyConnectedUint8Test, false)
+ARMNN_AUTO_TEST_CASE(FullyConnectedUint8, FullyConnectedTest<armnn::DataType::QuantisedAsymm8>, false)
+ARMNN_AUTO_TEST_CASE(FullyConnectedQSymm16, FullyConnectedTest<armnn::DataType::QuantisedSymm16>, false)
ARMNN_AUTO_TEST_CASE(SimpleFullyConnectedWithBias, FullyConnectedFloat32Test, true, false)
-ARMNN_AUTO_TEST_CASE(FullyConnectedBiasedUint8, FullyConnectedUint8Test, true)
+ARMNN_AUTO_TEST_CASE(FullyConnectedBiasedUint8, FullyConnectedTest<armnn::DataType::QuantisedAsymm8>, true)
+ARMNN_AUTO_TEST_CASE(FullyConnectedBiasedQSymm16, FullyConnectedTest<armnn::DataType::QuantisedSymm16>, true)
ARMNN_AUTO_TEST_CASE(SimpleFullyConnectedWithTranspose, FullyConnectedFloat32Test, false, true)
ARMNN_AUTO_TEST_CASE(FullyConnectedLarge, FullyConnectedLargeTest, false)