path: root/ConversionUtils_1_2.hpp
author     Teresa Charlin <teresa.charlinreyes@arm.com>    2021-09-01 14:19:38 +0100
committer  Teresa Charlin <teresa.charlinreyes@arm.com>    2021-09-23 14:08:03 +0100
commit     7f5b51e6b66771bdb17be056ddee386fd8a79e3e (patch)
tree       50a3d8befad9f17100fce004c238755a8367e329 /ConversionUtils_1_2.hpp
parent     6bf3f43c6f1d88c676f277f4c3c766cb3ea37ed0 (diff)
download   android-nn-driver-7f5b51e6b66771bdb17be056ddee386fd8a79e3e.tar.gz
IVGCVSW-3715 Add Channel Shuffle support
!armnn:6211
Signed-off-by: Teresa Charlin <teresa.charlinreyes@arm.com>
Change-Id: Ie5e5d160f6d826c30f1fff298a4a73151c1ccdbb
Diffstat (limited to 'ConversionUtils_1_2.hpp')
-rw-r--r--  ConversionUtils_1_2.hpp  84
1 file changed, 84 insertions, 0 deletions
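
Background on the operation being added: CHANNEL_SHUFFLE splits the channel dimension into m_NumGroups groups and interleaves them, which is equivalent to reshaping the C channels into [groups, C/groups], transposing the two axes and flattening back. The standalone sketch below (not part of this commit; the function name, NCHW layout and float element type are purely illustrative) shows that reference behaviour, which the converter in this patch maps onto ArmNN's ChannelShuffle layer.

// Reference semantics only: an illustrative, self-contained sketch of what
// CHANNEL_SHUFFLE computes; it is not part of this commit.
#include <algorithm>
#include <cstddef>
#include <vector>

std::vector<float> ChannelShuffleNCHW(const std::vector<float>& input,
                                      std::size_t batches, std::size_t channels,
                                      std::size_t height, std::size_t width,
                                      std::size_t numGroups)
{
    // Split C into [numGroups, channelsPerGroup], transpose the two axes, flatten back.
    const std::size_t channelsPerGroup = channels / numGroups; // assumes channels % numGroups == 0
    const std::size_t spatial = height * width;
    std::vector<float> output(input.size());

    for (std::size_t n = 0; n < batches; ++n)
    {
        for (std::size_t c = 0; c < channels; ++c)
        {
            // Channel c sits at position p within group g; after the shuffle it moves
            // to destination channel p * numGroups + g.
            const std::size_t g = c / channelsPerGroup;
            const std::size_t p = c % channelsPerGroup;
            const std::size_t dstChannel = p * numGroups + g;

            const std::size_t srcBase = (n * channels + c) * spatial;
            const std::size_t dstBase = (n * channels + dstChannel) * spatial;
            std::copy(input.begin() + srcBase,
                      input.begin() + srcBase + spatial,
                      output.begin() + dstBase);
        }
    }
    return output;
}
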
diff --git a/ConversionUtils_1_2.hpp b/ConversionUtils_1_2.hpp
index 594b8e1c..404ff32d 100644
--- a/ConversionUtils_1_2.hpp
+++ b/ConversionUtils_1_2.hpp
@@ -172,6 +172,90 @@ bool ConvertCast(const HalOperation& operation,
 template<typename HalPolicy,
          typename HalOperation = typename HalPolicy::Operation,
          typename HalModel = typename HalPolicy::Model>
+bool ConvertChannelShuffle(const HalOperation& operation,
+                           const HalModel& model,
+                           ConversionData& data)
+{
+    using HalOperand = typename HalPolicy::Operand;
+    using HalOperandType = typename HalPolicy::OperandType;
+
+    ALOGV("HalPolicy::ConvertChannelShuffle()");
+
+    LayerInputHandle input = ConvertToLayerInputHandle<HalPolicy>(operation, 0, model, data);
+    if (!input.IsValid())
+    {
+        return Fail("%s: Operation has invalid inputs", __func__);
+    }
+    auto inputDimensions = static_cast<int32_t>(input.GetTensorInfo().GetNumDimensions());
+
+    ChannelShuffleDescriptor descriptor;
+
+    int32_t groups;
+    if (!GetInputScalar<HalPolicy>(operation, 1, HalOperandType::INT32, groups, model, data))
+    {
+        return Fail("%s: Operation has invalid or unsupported number of groups operand", __func__);
+    }
+    descriptor.m_NumGroups = static_cast<uint32_t>(groups);
+
+    int32_t axis;
+    if (!GetInputScalar<HalPolicy>(operation, 2, HalOperandType::INT32, axis, model, data))
+    {
+        return Fail("%s: Operation has invalid or unsupported dimension channel shuffle operand", __func__);
+    }
+    if (((axis < -inputDimensions) && (axis < 0)) || ((axis >= inputDimensions) && (axis > 0)))
+    {
+        return Fail("%s: Operation has invalid dimension: %d. It is out of bounds [-%d, %d)", __func__, axis,
+                    inputDimensions, inputDimensions);
+    }
+    int positiveAxis = (axis < 0) ? inputDimensions + axis : axis;
+    descriptor.m_Axis = static_cast<uint32_t>(positiveAxis);
+
+    const HalOperand* output = GetOutputOperand<HalPolicy>(operation, 0, model);
+    if (!output)
+    {
+        return Fail("%s: Could not read output 0", __func__);
+    }
+
+    const TensorInfo& inputInfo = input.GetTensorInfo();
+    const TensorInfo& outputInfo = GetTensorInfoForOperand(*output);
+
+    bool isSupported = false;
+
+    auto validateFunc = [&](const armnn::TensorInfo& outputInfo, bool& isSupported)
+    {
+        FORWARD_LAYER_SUPPORT_FUNC(__func__,
+                                   IsChannelShuffleSupported,
+                                   data.m_Backends,
+                                   isSupported,
+                                   inputInfo,
+                                   outputInfo,
+                                   descriptor);
+    };
+
+    if (!IsDynamicTensor(outputInfo))
+    {
+        validateFunc(outputInfo, isSupported);
+    }
+    else
+    {
+        isSupported = AreDynamicTensorsSupported();
+    }
+
+    if (!isSupported)
+    {
+        return false;
+    }
+
+    IConnectableLayer* layer = data.m_Network->AddChannelShuffleLayer(descriptor);
+    assert(layer != nullptr);
+    input.Connect(layer->GetInputSlot(0));
+
+    return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, model, data, nullptr, validateFunc);
+}
+
+template<typename HalPolicy,
+         typename HalOperation = typename HalPolicy::Operation,
+         typename HalModel = typename HalPolicy::Model>
 bool ConvertComparison_1_2(const HalOperation& operation,
                            const HalModel& model,
                            ConversionData& data,
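
Note that the diffstat above is limited to ConversionUtils_1_2.hpp, so the HalPolicy plumbing that routes the HAL CHANNEL_SHUFFLE operation to the new ConvertChannelShuffle converter is not shown here. Independent of the driver, the ArmNN layer the converter targets can be exercised directly through the INetwork builder API; the following is a minimal sketch under that assumption, with an arbitrary example shape, group count and axis (ChannelShuffleDescriptor::m_NumGroups and m_Axis are the same fields the converter populates from the operation's scalar inputs).

// Illustrative sketch only (not part of this commit): building a tiny ArmNN
// network containing a ChannelShuffle layer via the INetwork builder API.
#include <armnn/ArmNN.hpp>

int main()
{
    armnn::INetworkPtr network = armnn::INetwork::Create();

    armnn::ChannelShuffleDescriptor descriptor;
    descriptor.m_NumGroups = 2; // split the channel dimension into 2 groups
    descriptor.m_Axis      = 1; // shuffle along the channel axis of an NCHW tensor

    armnn::IConnectableLayer* inputLayer   = network->AddInputLayer(0);
    armnn::IConnectableLayer* shuffleLayer = network->AddChannelShuffleLayer(descriptor);
    armnn::IConnectableLayer* outputLayer  = network->AddOutputLayer(0);

    // Channel shuffle only permutes data, so input and output shapes are identical.
    armnn::TensorInfo tensorInfo({1, 4, 2, 2}, armnn::DataType::Float32);

    inputLayer->GetOutputSlot(0).Connect(shuffleLayer->GetInputSlot(0));
    inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    shuffleLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
    shuffleLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    return 0;
}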