author      Mike Kelly <mike.kelly@arm.com>             2021-09-01 17:09:12 +0100
committer   mike.kelly <mike.kelly@arm.com>              2021-09-02 08:26:01 +0000
commit      00e9ebf026b1e2f6dbbed201ce1abe0091d6453b (patch)
tree        0aaffd6e63f4cbdba1651e2fe25218868525eb56 /tests
parent      bd575b270f65601ff7bdfdc58de45b9675d5541a (diff)
download    armnn-00e9ebf026b1e2f6dbbed201ce1abe0091d6453b.tar.gz
GitHub #572 ExecuteNetwork Segfault for TFLite Delegate
* Fixed Segfault when optional input tensor shapes aren't provided

Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Change-Id: Ib18be62d3654020037e6101976d58297b2a983c4
Diffstat (limited to 'tests')
-rw-r--r--    tests/ExecuteNetwork/ExecuteNetwork.cpp    29
1 file changed, 18 insertions, 11 deletions
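
The crash described above comes from indexing params.m_InputTensorShapes when the user has not supplied any --input-tensor-shape arguments, leaving that vector empty. A minimal sketch of the failure mode and the guard introduced by this commit is shown below; the types (TensorShape, Params) and the helper GetInputSize are simplified stand-ins for illustration, not the actual ExecuteNetworkParams / TfLite types used in the file.

// Sketch only: hypothetical simplified types mirroring the pattern fixed here.
#include <memory>
#include <vector>

struct TensorShape
{
    std::vector<unsigned int> m_Dims;
    unsigned int GetNumElements() const
    {
        unsigned int count = 1;
        for (unsigned int d : m_Dims) { count *= d; }
        return count;
    }
};

struct Params
{
    // Empty when no --input-tensor-shape options are passed on the command line.
    std::vector<std::unique_ptr<TensorShape>> m_InputTensorShapes;
};

unsigned int GetInputSize(const Params& params, unsigned int inputIndex,
                          const std::vector<int>& delegateDims)
{
    // Before the fix: params.m_InputTensorShapes[inputIndex] was dereferenced
    // unconditionally, which is out of bounds (and a segfault) when the vector is empty.
    if (!params.m_InputTensorShapes.empty())
    {
        return params.m_InputTensorShapes[inputIndex]->GetNumElements();
    }

    // Fallback: derive the element count from the shape reported by the interpreter.
    unsigned int inputSize = 1;
    for (int dim : delegateDims)
    {
        inputSize *= static_cast<unsigned int>(dim);
    }
    return inputSize;
}
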
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 9a4864542f..16d34c8c9d 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -89,10 +89,17 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
int input = tfLiteInterpreter->inputs()[inputIndex];
TfLiteIntArray* inputDims = tfLiteInterpreter->tensor(input)->dims;
- long inputSize = 1;
- for (unsigned int dim = 0; dim < static_cast<unsigned int>(inputDims->size); ++dim)
+ unsigned int inputSize = 1;
+ if (params.m_InputTensorShapes.size() > 0)
{
- inputSize *= inputDims->data[dim];
+ inputSize = params.m_InputTensorShapes[inputIndex]->GetNumElements();
+ }
+ else
+ {
+ for (unsigned int dim = 0; dim < static_cast<unsigned int>(inputDims->size); ++dim)
+ {
+ inputSize *= inputDims->data[dim];
+ }
}
if (params.m_InputTypes[inputIndex].compare("float") == 0)
@@ -108,10 +115,10 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
std::vector<float> tensorData;
PopulateTensorWithDataGeneric<float>(tensorData,
- params.m_InputTensorShapes[inputIndex]->GetNumElements(),
- dataFile,
- [](const std::string& s)
- { return std::stof(s); });
+ inputSize,
+ dataFile,
+ [](const std::string& s)
+ { return std::stof(s); });
std::copy(tensorData.begin(), tensorData.end(), inputData);
}
@@ -128,7 +135,7 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
std::vector<int8_t> tensorData;
PopulateTensorWithDataGeneric<int8_t>(tensorData,
- params.m_InputTensorShapes[inputIndex]->GetNumElements(),
+ inputSize,
dataFile,
[](const std::string& s)
{ return armnn::numeric_cast<int8_t>(std::stoi(s)); });
@@ -148,7 +155,7 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
std::vector<int32_t> tensorData;
PopulateTensorWithDataGeneric<int32_t>(tensorData,
- params.m_InputTensorShapes[inputIndex]->GetNumElements(),
+ inputSize,
dataFile,
[](const std::string& s)
{ return std::stoi(s); });
@@ -169,7 +176,7 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
std::vector<uint8_t> tensorData;
PopulateTensorWithDataGeneric<uint8_t>(tensorData,
- params.m_InputTensorShapes[inputIndex]->GetNumElements(),
+ inputSize,
dataFile,
[](const std::string& s)
{ return armnn::numeric_cast<uint8_t>(std::stoi(s)); });
@@ -189,7 +196,7 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
std::vector<int8_t> tensorData;
PopulateTensorWithDataGeneric<int8_t>(tensorData,
- params.m_InputTensorShapes[inputIndex]->GetNumElements(),
+ inputSize,
dataFile,
[](const std::string& s)
{ return armnn::numeric_cast<int8_t>(std::stoi(s)); });
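
All of the per-type hunks above make the same substitution: each branch now consumes the single precomputed inputSize instead of re-reading params.m_InputTensorShapes. The sketch below shows that shared pattern; PopulateTensorWithDataGeneric here is a simplified stand-in (its real armnn signature, including the data-file parameter, is not shown in this diff), and PopulateInput is a hypothetical wrapper for illustration.

// Sketch only: simplified stand-in for the armnn helper and its call sites.
#include <cstdint>
#include <string>
#include <vector>

template <typename T, typename ParseFunc>
void PopulateTensorWithDataGeneric(std::vector<T>& tensorData,
                                   unsigned int numElements,
                                   ParseFunc parseElement)
{
    // With no data file, fill the buffer with a parsed default so it holds numElements entries.
    tensorData.assign(numElements, parseElement("0"));
}

void PopulateInput(const std::string& inputType, unsigned int inputSize)
{
    if (inputType == "float")
    {
        std::vector<float> tensorData;
        PopulateTensorWithDataGeneric<float>(tensorData, inputSize,
                                             [](const std::string& s) { return std::stof(s); });
    }
    else if (inputType == "int")
    {
        std::vector<int32_t> tensorData;
        PopulateTensorWithDataGeneric<int32_t>(tensorData, inputSize,
                                               [](const std::string& s) { return std::stoi(s); });
    }
    // ...the uint8_t / int8_t branches follow the same pattern with numeric_cast on the parsed value.
}
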