path: root/test/DriverTestHelpers.cpp
author     Nikhil Raj <nikhil.raj@arm.com>            2018-09-03 11:25:56 +0100
committer  Matthew Bentham <matthew.bentham@arm.com>  2018-09-18 12:40:40 +0100
commit     77605826a353981d41f0ee346850d411770535f8 (patch)
tree       5cdd4c8cb6768d7651089105ee2884f536d8a34c /test/DriverTestHelpers.cpp
parent     b0717b5241a15e3e4d37a1b51b6e5fd9a92a664f (diff)
download   android-nn-driver-77605826a353981d41f0ee346850d411770535f8.tar.gz
IVGCVSW-1713 Create a minimum unit test to compare the results
before and after passing the FP16 flag in the Android-nn-driver

Change-Id: If8d4ca12421c3bee2526eec98f11d393af822373
Diffstat (limited to 'test/DriverTestHelpers.cpp')
-rw-r--r--  test/DriverTestHelpers.cpp  79
1 file changed, 18 insertions(+), 61 deletions(-)
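
A minimal sketch of how the reworked PrepareModelWithStatus() helper could support the unit test this commit targets: prepare the same V1_0 model twice, once with a driver configured for the FP16 optimization and once without, then execute both and compare the outputs. PrepareModelWithStatus() and its parameters come from the patch below; CreateSimpleModel(), the fp16-enabling DriverOptions argument, and the test-case name are illustrative assumptions, not part of this change.

#include "DriverTestHelpers.hpp"
#include <boost/test/unit_test.hpp>

using namespace android::hardware;
using namespace driverTestHelpers;

BOOST_AUTO_TEST_CASE(CompareFp32AndFp16PreparedModels)
{
    // Hypothetical helper that builds a small float32 V1_0 model.
    neuralnetworks::V1_0::Model model = CreateSimpleModel();

    // One driver left at its defaults, one with FP16 optimization enabled.
    // The exact DriverOptions argument for FP16 is an assumption here.
    armnn_driver::ArmnnDriver fp32Driver(
        armnn_driver::DriverOptions(armnn::Compute::CpuRef));
    armnn_driver::ArmnnDriver fp16Driver(
        armnn_driver::DriverOptions(armnn::Compute::CpuRef, /*fp16Enabled=*/true));

    ErrorStatus fp32Status = ErrorStatus::NONE;
    ErrorStatus fp16Status = ErrorStatus::NONE;

    // PrepareModelWithStatus() (from this patch) prepares the model, checks
    // the callback status against the expected status, and returns the
    // prepared model.
    android::sp<IPreparedModel> fp32Prepared =
        PrepareModelWithStatus(model, fp32Driver, fp32Status, ErrorStatus::NONE);
    android::sp<IPreparedModel> fp16Prepared =
        PrepareModelWithStatus(model, fp16Driver, fp16Status, ErrorStatus::NONE);

    BOOST_TEST((fp32Prepared != nullptr));
    BOOST_TEST((fp16Prepared != nullptr));

    // Both prepared models would then be run on an identical Request via the
    // Execute() helper and their output pools compared within a tolerance.
}
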
diff --git a/test/DriverTestHelpers.cpp b/test/DriverTestHelpers.cpp
index 11154912..ded24592 100644
--- a/test/DriverTestHelpers.cpp
+++ b/test/DriverTestHelpers.cpp
@@ -109,70 +109,32 @@ void AddPoolAndSetData(uint32_t size, Request& request, const float* data)
memcpy(dst, data, size * sizeof(float));
}
-void AddOperand(neuralnetworks::V1_0::Model& model, const Operand& op)
-{
- model.operands.resize(model.operands.size() + 1);
- model.operands[model.operands.size() - 1] = op;
-}
-
-void AddIntOperand(neuralnetworks::V1_0::Model& model, int32_t value)
-{
- DataLocation location = {};
- location.offset = model.operandValues.size();
- location.length = sizeof(int32_t);
-
- Operand op = {};
- op.type = OperandType::INT32;
- op.dimensions = hidl_vec<uint32_t>{};
- op.lifetime = OperandLifeTime::CONSTANT_COPY;
- op.location = location;
-
- model.operandValues.resize(model.operandValues.size() + location.length);
- *reinterpret_cast<int32_t*>(&model.operandValues[location.offset]) = value;
-
- AddOperand(model, op);
-}
-
-void AddInputOperand(neuralnetworks::V1_0::Model& model,
- hidl_vec<uint32_t> dimensions,
- neuralnetworks::V1_0::OperandType operandType)
-{
- Operand op = {};
- op.type = operandType;
- op.dimensions = dimensions;
- op.lifetime = OperandLifeTime::MODEL_INPUT;
-
- AddOperand(model, op);
-
- model.inputIndexes.resize(model.inputIndexes.size() + 1);
- model.inputIndexes[model.inputIndexes.size() - 1] = model.operands.size() - 1;
-}
-
-void AddOutputOperand(neuralnetworks::V1_0::Model& model,
- hidl_vec<uint32_t> dimensions,
- neuralnetworks::V1_0::OperandType operandType)
+android::sp<IPreparedModel> PrepareModelWithStatus(const neuralnetworks::V1_0::Model& model,
+ armnn_driver::ArmnnDriver& driver,
+ ErrorStatus& prepareStatus,
+ ErrorStatus expectedStatus)
{
- Operand op = {};
- op.type = operandType;
- op.scale = operandType == neuralnetworks::V1_0::OperandType::TENSOR_QUANT8_ASYMM ? 1.f / 255.f : 0.f;
- op.dimensions = dimensions;
- op.lifetime = OperandLifeTime::MODEL_OUTPUT;
-
- AddOperand(model, op);
+ android::sp<PreparedModelCallback> cb(new PreparedModelCallback());
+ driver.prepareModel(model, cb);
- model.outputIndexes.resize(model.outputIndexes.size() + 1);
- model.outputIndexes[model.outputIndexes.size() - 1] = model.operands.size() - 1;
+ prepareStatus = cb->GetErrorStatus();
+ BOOST_TEST(prepareStatus == expectedStatus);
+ if (expectedStatus == ErrorStatus::NONE)
+ {
+ BOOST_TEST((cb->GetPreparedModel() != nullptr));
+ }
+ return cb->GetPreparedModel();
}
+#if defined(ARMNN_ANDROID_NN_V1_1) // Using ::android::hardware::neuralnetworks::V1_1.
-android::sp<IPreparedModel> PrepareModelWithStatus(const neuralnetworks::V1_0::Model& model,
+android::sp<IPreparedModel> PrepareModelWithStatus(const neuralnetworks::V1_1::Model& model,
armnn_driver::ArmnnDriver& driver,
- ErrorStatus & prepareStatus,
+ ErrorStatus& prepareStatus,
ErrorStatus expectedStatus)
{
-
android::sp<PreparedModelCallback> cb(new PreparedModelCallback());
- driver.prepareModel(model, cb);
+ driver.prepareModel_1_1(model, neuralnetworks::V1_1::ExecutionPreference::LOW_POWER, cb);
prepareStatus = cb->GetErrorStatus();
BOOST_TEST(prepareStatus == expectedStatus);
@@ -183,12 +145,7 @@ android::sp<IPreparedModel> PrepareModelWithStatus(const neuralnetworks::V1_0::M
return cb->GetPreparedModel();
}
-android::sp<IPreparedModel> PrepareModel(const neuralnetworks::V1_0::Model& model,
- armnn_driver::ArmnnDriver& driver)
-{
- ErrorStatus prepareStatus = ErrorStatus::NONE;
- return PrepareModelWithStatus(model, driver, prepareStatus);
-}
+#endif
ErrorStatus Execute(android::sp<IPreparedModel> preparedModel,
const Request& request,