author     Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>   2019-07-23 14:01:37 +0100
committer  Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>   2019-07-23 14:33:20 +0100
commit     573a8fa04ef0774fe44a4ac6669582a6bf753d6e (patch)
tree       efb5f466c71af293597506ee77dcf9f944c6559f /1.2
parent     074c25a1535b648fdf19d7f6648e8aceef9aa7ad (diff)
download   android-nn-driver-573a8fa04ef0774fe44a4ac6669582a6bf753d6e.tar.gz
IVGCVSW-3553 Fix failing zero_sized tests
Signed-off-by: Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>
Change-Id: Idd10f34babc0d2552d599872b853ba5fb5c98351
Diffstat (limited to '1.2')
-rw-r--r--  1.2/HalPolicy.cpp  17
1 file changed, 9 insertions(+), 8 deletions(-)
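
The fix replaces the OutputShapeUtils helper IsDynamicOutput with IsDynamicTensor from Utils.hpp (hence the added include), so every conversion routine uses the same check before inferring an output shape. As a rough, hedged sketch of what such a check amounts to, assuming the driver marks unknown output dimensions with size 0 (this is an illustration, not the actual Utils.cpp implementation):

    #include <armnn/Tensor.hpp>

    // Hedged sketch of what Utils.hpp's IsDynamicTensor plausibly checks;
    // not the actual implementation. A tensor counts as dynamic while any
    // dimension of its shape is still unspecified, encoded here as size 0.
    bool IsDynamicTensor(const armnn::TensorInfo& tensorInfo)
    {
        const armnn::TensorShape& shape = tensorInfo.GetShape();
        for (unsigned int i = 0; i < shape.GetNumDimensions(); ++i)
        {
            if (shape[i] == 0)
            {
                return true;
            }
        }
        return false;
    }

Each hunk below keeps the existing behaviour: when the check reports a dynamic output, the shape is inferred from the inputs (or recomputed inside a try block) before the layer is added.
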
diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp
index 3c00388c..f93629ef 100644
--- a/1.2/HalPolicy.cpp
+++ b/1.2/HalPolicy.cpp
@@ -6,6 +6,7 @@
#include "HalPolicy.hpp"
#include "OutputShapeUtils.hpp"
+#include "Utils.hpp"
#include "../1.0/HalPolicy.hpp"
#include "../1.1/HalPolicy.hpp"
@@ -270,7 +271,7 @@ bool HalPolicy::ConvertConv2d(const Operation& operation, const Model& model, Co
desc.m_BiasEnabled = true;
armnn::Optional<armnn::TensorInfo> biases(bias.GetInfo());
- if (IsDynamicOutput(outputInfo))
+ if (IsDynamicTensor(outputInfo))
{
try
{
@@ -450,7 +451,7 @@ bool HalPolicy::ConvertDepthwiseConv2d(const Operation& operation, const Model&
armnn::Optional<armnn::TensorInfo> biases(bias.GetInfo());
armnn::TensorInfo outputInfo = GetTensorInfoForOperand(*output);
- if (IsDynamicOutput(outputInfo))
+ if (IsDynamicTensor(outputInfo))
{
try
{
@@ -522,7 +523,7 @@ bool HalPolicy::ConvertMaximum(const Operation& operation, const Model& model, C
}
armnn::TensorInfo outInfo = GetTensorInfoForOperand(*outputOperand);
- if (IsDynamicOutput(outInfo))
+ if (IsDynamicTensor(outInfo))
{
ALOGD("Output shape not set, will infer from inputs");
outInfo.SetShape(InferMaximumOutputShape(input0.GetTensorInfo().GetShape(), input1.GetTensorInfo().GetShape()));
@@ -571,7 +572,7 @@ bool HalPolicy::ConvertMinimum(const Operation& operation, const Model& model, C
}
armnn::TensorInfo outputInfo = GetTensorInfoForOperand(*output);
- if (IsDynamicOutput(outputInfo))
+ if (IsDynamicTensor(outputInfo))
{
ALOGD("Output shape not set, will infer from inputs");
outputInfo.SetShape(InferMinimumOutputShape(input0.GetTensorInfo().GetShape(),
@@ -628,7 +629,7 @@ bool HalPolicy::ConvertPadV2(const Operation& operation, const Model& model, Con
}
armnn::TensorInfo outputInfo = GetTensorInfoForOperand(*output);
- if (IsDynamicOutput(outputInfo))
+ if (IsDynamicTensor(outputInfo))
{
ALOGD("Output shape not set, will infer from inputs");
outputInfo.SetShape(InferPadOutputShape(inputInfo.GetShape(), descriptor.m_PadList));
@@ -726,7 +727,7 @@ bool HalPolicy::ConvertPrelu(const Operation& operation, const Model& model, Con
const armnn::TensorInfo& alphaInfo = alpha.GetTensorInfo();
armnn::TensorInfo outputInfo = GetTensorInfoForOperand(*output);
- if (IsDynamicOutput(outputInfo))
+ if (IsDynamicTensor(outputInfo))
{
ALOGD("Output shape not set, will infer from inputs");
outputInfo.SetShape(InferPreluOutputShape(inputInfo.GetShape(), alphaInfo.GetShape()));
@@ -848,7 +849,7 @@ bool HalPolicy::ConvertResize(const Operation& operation,
return false;
}
- if (IsDynamicOutput(outputInfo))
+ if (IsDynamicTensor(outputInfo))
{
try
{
@@ -961,7 +962,7 @@ bool HalPolicy::ConvertSoftmax(const Operation& operation, const Model& model, C
}
armnn::TensorInfo outputInfo = GetTensorInfoForOperand(*outputOperand);
- if (IsDynamicOutput(outputInfo))
+ if (IsDynamicTensor(outputInfo))
{
ALOGD("Output shape not set, will infer from input");
outputInfo.SetShape(input.GetTensorInfo().GetShape());
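
All of the hunks above follow the same shape-inference pattern around the renamed check; condensed here from the ConvertMaximum hunk (identifiers as they appear in the diff, surrounding validation omitted):

    armnn::TensorInfo outInfo = GetTensorInfoForOperand(*outputOperand);
    if (IsDynamicTensor(outInfo))
    {
        ALOGD("Output shape not set, will infer from inputs");
        outInfo.SetShape(InferMaximumOutputShape(input0.GetTensorInfo().GetShape(),
                                                 input1.GetTensorInfo().GetShape()));
    }

The conversions whose shape recomputation can throw (Conv2d, DepthwiseConv2d, Resize) wrap the equivalent call in a try block instead, as shown in their hunks.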