Diffstat (limited to '1.2'):
 1.2/ArmnnDriverImpl.cpp | 4 ++--
 1.2/HalPolicy.cpp       | 5 ++---
 2 files changed, 4 insertions(+), 5 deletions(-)
diff --git a/1.2/ArmnnDriverImpl.cpp b/1.2/ArmnnDriverImpl.cpp
index 87ef08c2..3b2cb744 100644
--- a/1.2/ArmnnDriverImpl.cpp
+++ b/1.2/ArmnnDriverImpl.cpp
@@ -80,7 +80,7 @@ Return<ErrorStatus> ArmnnDriverImpl::prepareArmnnModel_1_2(const armnn::IRuntime
const sp<V1_2::IPreparedModelCallback>& cb,
bool float32ToFloat16)
{
- ALOGV("ArmnnDriverImpl::prepareModel()");
+ ALOGV("ArmnnDriverImpl::prepareArmnnModel_1_2()");
if (cb.get() == nullptr)
{
@@ -290,4 +290,4 @@ Return<void> ArmnnDriverImpl::getCapabilities_1_2(const armnn::IRuntimePtr& runt
}
} // namespace hal_1_2
-} // namespace armnn_driver
\ No newline at end of file
+} // namespace armnn_driver
diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp
index 1c6159ef..e058e026 100644
--- a/1.2/HalPolicy.cpp
+++ b/1.2/HalPolicy.cpp
@@ -443,7 +443,7 @@ bool HalPolicy::ConvertPrelu(const Operation& operation, const Model& model, Con
if (!output)
{
- return Fail("%s: Could not read output 0", __func__);
+ return Fail("%s: Could not read output", __func__);
}
const armnn::TensorInfo& inputInfo = input.GetTensorInfo();
@@ -467,8 +467,7 @@ bool HalPolicy::ConvertPrelu(const Operation& operation, const Model& model, Con
return Fail("%s: AddPreluLayer failed", __func__);
}
- input.Connect(layer->GetInputSlot(0));
- alpha.Connect(layer->GetInputSlot(1));
+ BroadcastTensor(input, alpha, layer, *data.m_Network);
return SetupAndTrackLayerOutputSlot<hal_1_2::HalPolicy>(operation, 0, *layer, model, data);
}
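
Note on the HalPolicy.cpp hunk: the ConvertPrelu change replaces the two direct Connect() calls with the driver's BroadcastTensor helper, so that an alpha tensor of lower rank than the input can still feed the PReLU layer. The sketch below is not the actual BroadcastTensor from ConversionUtils.hpp; it is a simplified illustration assuming plain armnn::IOutputSlot inputs rather than the driver's LayerInputHandle, and the name ConnectWithBroadcast is hypothetical. It shows the general idea: prepend 1-dimensions to the lower-rank tensor via a Reshape layer, then connect both tensors to input slots 0 and 1 of the target layer.

// Simplified, hypothetical sketch of a broadcast-aware connection helper;
// NOT the actual BroadcastTensor from ConversionUtils.hpp.
#include <vector>

#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/Tensor.hpp>

void ConnectWithBroadcast(armnn::IOutputSlot& input0,
                          armnn::IOutputSlot& input1,
                          armnn::IConnectableLayer* targetLayer,
                          armnn::INetwork& network)
{
    const armnn::TensorInfo& info0 = input0.GetTensorInfo();
    const armnn::TensorInfo& info1 = input1.GetTensorInfo();

    if (info0.GetNumDimensions() == info1.GetNumDimensions())
    {
        // Ranks already match: connect directly, as the removed code did.
        input0.Connect(targetLayer->GetInputSlot(0));
        input1.Connect(targetLayer->GetInputSlot(1));
        return;
    }

    // Identify the lower-rank tensor and build a target shape padded with
    // leading 1s, e.g. a [C] alpha becomes [1, 1, 1, C] next to an NHWC input.
    const bool input0IsSmaller = info0.GetNumDimensions() < info1.GetNumDimensions();
    armnn::IOutputSlot&      smaller   = input0IsSmaller ? input0 : input1;
    const armnn::TensorInfo& smallInfo = input0IsSmaller ? info0 : info1;
    const armnn::TensorInfo& bigInfo   = input0IsSmaller ? info1 : info0;

    std::vector<unsigned int> dims(bigInfo.GetNumDimensions(), 1u);
    const unsigned int offset = bigInfo.GetNumDimensions() - smallInfo.GetNumDimensions();
    for (unsigned int i = 0; i < smallInfo.GetNumDimensions(); ++i)
    {
        dims[offset + i] = smallInfo.GetShape()[i];
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = armnn::TensorShape(
        static_cast<unsigned int>(dims.size()), dims.data());

    // Insert a Reshape layer in front of the lower-rank tensor.
    armnn::IConnectableLayer* reshapeLayer = network.AddReshapeLayer(reshapeDesc);
    armnn::TensorInfo reshapedInfo(reshapeDesc.m_TargetShape,
                                   smallInfo.GetDataType(),
                                   smallInfo.GetQuantizationScale(),
                                   smallInfo.GetQuantizationOffset());
    reshapeLayer->GetOutputSlot(0).SetTensorInfo(reshapedInfo);
    smaller.Connect(reshapeLayer->GetInputSlot(0));

    // Connect both tensors to the target layer, preserving slot order.
    armnn::IOutputSlot& slot0 = input0IsSmaller ? reshapeLayer->GetOutputSlot(0) : input0;
    armnn::IOutputSlot& slot1 = input0IsSmaller ? input1 : reshapeLayer->GetOutputSlot(0);
    slot0.Connect(targetLayer->GetInputSlot(0));
    slot1.Connect(targetLayer->GetInputSlot(1));
}

In the actual hunk, BroadcastTensor(input, alpha, layer, *data.m_Network) performs this role for the PReLU layer created by AddPreluLayer, after which SetupAndTrackLayerOutputSlot wires up output 0 as before.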