Diffstat (limited to '1.2/HalPolicy.cpp')
-rw-r--r--   1.2/HalPolicy.cpp   6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp
index bf4525aa..c9b4dadb 100644
--- a/1.2/HalPolicy.cpp
+++ b/1.2/HalPolicy.cpp
@@ -1347,7 +1347,7 @@ bool HalPolicy::ConvertMaximum(const Operation& operation, const Model& model, C
     IConnectableLayer* layer = data.m_Network->AddMaximumLayer();
     assert(layer != nullptr);
-    bool isReshapeSupported = BroadcastTensor(input0, input1, outInfo, layer, data);
+    bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
     if (!isReshapeSupported)
     {
         return false;
@@ -1402,7 +1402,7 @@ bool HalPolicy::ConvertMinimum(const Operation& operation, const Model& model, C
     IConnectableLayer* const layer = data.m_Network->AddMinimumLayer();
     assert(layer != nullptr);
-    bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, layer, data);
+    bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
     if (!isReshapeSupported)
     {
         return false;
@@ -1565,7 +1565,7 @@ bool HalPolicy::ConvertPrelu(const Operation& operation, const Model& model, Con
         return Fail("%s: AddPreluLayer failed", __func__);
     }
-    bool isReshapeSupported = BroadcastTensor(input, alpha, outputInfo, layer, data);
+    bool isReshapeSupported = BroadcastTensor(input, alpha, layer, data);
     if (!isReshapeSupported)
     {
         return false;
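
All three hunks make the same change: the caller-supplied output TensorInfo (outInfo / outputInfo) is dropped from the BroadcastTensor calls, and the caller relies only on the helper's boolean result. This suggests the helper now derives any broadcast reshape from the two input shapes on its own. The standalone C++ sketch below is a toy model of that idea, not the driver's actual BroadcastTensor or the Arm NN API; every name in it is illustrative.

// Toy sketch only -- not the driver's BroadcastTensor and not Arm NN API.
// It models the behaviour implied by the diff: the broadcast reshape can be
// derived from the two input shapes alone, so the output TensorInfo no
// longer has to be passed in, and the caller just checks the bool result.
#include <algorithm>
#include <iostream>
#include <vector>

using Shape = std::vector<unsigned int>;

// Pads the lower-rank shape with leading 1s so both operands have the same
// rank, mimicking the reshape a binary broadcast layer would need.
bool BroadcastShapes(Shape& shape0, Shape& shape1)
{
    if (shape0.size() == shape1.size())
    {
        return true; // Ranks already match: no reshape required.
    }

    Shape& smaller      = (shape0.size() < shape1.size()) ? shape0 : shape1;
    const Shape& larger = (shape0.size() < shape1.size()) ? shape1 : shape0;

    Shape padded(larger.size(), 1u);
    std::copy(smaller.begin(), smaller.end(),
              padded.begin() + (larger.size() - smaller.size()));

    // A real driver would additionally ask the backend whether this reshape
    // is supported and return false if it is not; this sketch accepts it.
    smaller = padded;
    return true;
}

int main()
{
    Shape a = {2, 3, 4};
    Shape b = {4};

    if (!BroadcastShapes(a, b))
    {
        std::cout << "reshape not supported\n";
        return 1;
    }
    std::cout << "b is now rank " << b.size() << "\n"; // b == {1, 1, 4}
    return 0;
}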