author     Matthew Bentham <matthew.bentham@arm.com>   2019-04-09 13:10:46 +0100
committer  Matteo Martincigh <matteo.martincigh@arm.com>   2019-04-15 15:29:37 +0000
commit     3e68b97946bfee3c89ec2d4363a22550a10b2e55 (patch)
tree       d7d9be27aff46a18ac9442092d908223f435dcd7 /tests
parent     200e38039cf2cef21ae9ba6f86fab6fd524e5077 (diff)
download   armnn-3e68b97946bfee3c89ec2d4363a22550a10b2e55.tar.gz
IVGCVSW-2928 Fix issue with GPU profiling
Correctly enable GPU profiling when test profiling is enabled.

Remove extra copy of the profiling-enabled flag from InferenceModel::Params
and correctly pass around the copy that is in InferenceTestOptions.

!referencetests:180329

Change-Id: I0daa1bab2e7068fc479bf417a553183b1d922166
Signed-off-by: Matthew Bentham <matthew.bentham@arm.com>
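The practical effect of the change is easiest to see at a call site. The
following is a minimal sketch, not code from this patch: it relies only on the
post-patch declarations visible in this diff (the InferenceModel constructor
taking an explicit enableProfiling argument, and InferenceTestOptions carrying
m_EnableProfiling). The MakeModel helper and the spelling of the nested Params
typedef are assumptions added for illustration.

    // Hypothetical helper illustrating the new calling convention; assumes
    // the post-patch tests/InferenceModel.hpp and tests/InferenceTest.hpp.
    #include "InferenceModel.hpp"
    #include "InferenceTest.hpp"

    #include <memory>

    template <typename TParser, typename TDataType>
    std::unique_ptr<InferenceModel<TParser, TDataType>> MakeModel(
        const InferenceTestOptions& commonOptions,
        const typename InferenceModel<TParser, TDataType>::Params& modelParams)
    {
        // The profiling flag is no longer duplicated on Params; it is read
        // once from the shared test options and handed to the constructor.
        return std::make_unique<InferenceModel<TParser, TDataType>>(
            modelParams, commonOptions.m_EnableProfiling);
    }

Each m_ConstructModel callback updated below follows this pattern: the
InferenceTestOptions reference is threaded from ParseCommandLine through
ProcessCommandLineOptions into the model-constructing lambda, giving the flag
a single source of truth.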
Diffstat (limited to 'tests')
-rw-r--r--   tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp                     5
-rw-r--r--   tests/ExecuteNetwork/ExecuteNetwork.cpp                       3
-rw-r--r--   tests/InferenceModel.hpp                                      8
-rw-r--r--   tests/InferenceTest.cpp                                       2
-rw-r--r--   tests/InferenceTest.hpp                                       7
-rw-r--r--   tests/InferenceTest.inl                                      10
-rw-r--r--   tests/MobileNetSsdInferenceTest.hpp                           7
-rw-r--r--   tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp   5
-rw-r--r--   tests/YoloInferenceTest.hpp                                   7
9 files changed, 30 insertions, 24 deletions
diff --git a/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp b/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp
index c6ffe3d989..c136672c48 100644
--- a/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp
+++ b/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp
@@ -22,7 +22,8 @@ int main(int argc, char* argv[])
{
return make_unique<YoloTestCaseProvider<YoloInferenceModel>>(
[&]
- (typename YoloInferenceModel::CommandLineOptions modelOptions)
+ (const InferenceTestOptions &commonOptions,
+ typename YoloInferenceModel::CommandLineOptions modelOptions)
{
if (!ValidateDirectory(modelOptions.m_ModelDir))
{
@@ -39,7 +40,7 @@ int main(int argc, char* argv[])
modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
modelParams.m_EnableFp16TurboMode = modelOptions.m_EnableFp16TurboMode;
- return std::make_unique<YoloInferenceModel>(modelParams);
+ return std::make_unique<YoloInferenceModel>(modelParams, commonOptions.m_EnableProfiling);
});
});
}
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 9ac66d1cd3..1de22ed5d0 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -236,10 +236,9 @@ int MainImpl(const char* modelPath,
params.m_OutputBindings.push_back(outputName);
}
- params.m_EnableProfiling = enableProfiling;
params.m_SubgraphId = subgraphId;
params.m_EnableFp16TurboMode = enableFp16TurboMode;
- InferenceModel<TParser, TDataType> model(params, runtime);
+ InferenceModel<TParser, TDataType> model(params, enableProfiling, runtime);
for(unsigned int i = 0; i < inputTensorDataFilePaths.size(); ++i)
{
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index e168923048..cb6daefa9e 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -87,7 +87,6 @@ struct Params
std::vector<armnn::TensorShape> m_InputShapes;
std::vector<std::string> m_OutputBindings;
std::vector<armnn::BackendId> m_ComputeDevices;
- bool m_EnableProfiling;
size_t m_SubgraphId;
bool m_IsModelBinary;
bool m_VisualizePostOptimizationModel;
@@ -95,7 +94,6 @@ struct Params
Params()
: m_ComputeDevices{"CpuRef"}
- , m_EnableProfiling(false)
, m_SubgraphId(0)
, m_IsModelBinary(true)
, m_VisualizePostOptimizationModel(false)
@@ -428,8 +426,10 @@ public:
"to FP16 where the backend supports it.");
}
- InferenceModel(const Params& params, const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
- : m_EnableProfiling(params.m_EnableProfiling)
+ InferenceModel(const Params& params,
+ bool enableProfiling,
+ const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
+ : m_EnableProfiling(enableProfiling)
{
if (runtime)
{
diff --git a/tests/InferenceTest.cpp b/tests/InferenceTest.cpp
index 7413de97dd..89e78def2f 100644
--- a/tests/InferenceTest.cpp
+++ b/tests/InferenceTest.cpp
@@ -82,7 +82,7 @@ bool ParseCommandLine(int argc, char** argv, IInferenceTestCaseProvider& testCas
return false;
}
- if (!testCaseProvider.ProcessCommandLineOptions())
+ if (!testCaseProvider.ProcessCommandLineOptions(outParams))
{
return false;
}
diff --git a/tests/InferenceTest.hpp b/tests/InferenceTest.hpp
index 91a65ea494..3ebfdbcc3c 100644
--- a/tests/InferenceTest.hpp
+++ b/tests/InferenceTest.hpp
@@ -91,7 +91,7 @@ public:
virtual ~IInferenceTestCaseProvider() {}
virtual void AddCommandLineOptions(boost::program_options::options_description& options) {};
- virtual bool ProcessCommandLineOptions() { return true; };
+ virtual bool ProcessCommandLineOptions(const InferenceTestOptions &commonOptions) { return true; };
virtual std::unique_ptr<IInferenceTestCase> GetTestCase(unsigned int testCaseId) = 0;
virtual bool OnInferenceTestFinished() { return true; };
};
@@ -219,7 +219,7 @@ public:
ClassifierTestCaseProvider(TConstructDatabaseCallable constructDatabase, TConstructModelCallable constructModel);
virtual void AddCommandLineOptions(boost::program_options::options_description& options) override;
- virtual bool ProcessCommandLineOptions() override;
+ virtual bool ProcessCommandLineOptions(const InferenceTestOptions &commonOptions) override;
virtual std::unique_ptr<IInferenceTestCase> GetTestCase(unsigned int testCaseId) override;
virtual bool OnInferenceTestFinished() override;
@@ -227,7 +227,8 @@ private:
void ReadPredictions();
typename InferenceModel::CommandLineOptions m_ModelCommandLineOptions;
- std::function<std::unique_ptr<InferenceModel>(typename InferenceModel::CommandLineOptions)> m_ConstructModel;
+ std::function<std::unique_ptr<InferenceModel>(const InferenceTestOptions& commonOptions,
+ typename InferenceModel::CommandLineOptions)> m_ConstructModel;
std::unique_ptr<InferenceModel> m_Model;
std::string m_DataDir;
diff --git a/tests/InferenceTest.inl b/tests/InferenceTest.inl
index 538720bd83..0112037bc3 100644
--- a/tests/InferenceTest.inl
+++ b/tests/InferenceTest.inl
@@ -162,7 +162,8 @@ void ClassifierTestCaseProvider<TDatabase, InferenceModel>::AddCommandLineOption
}
template <typename TDatabase, typename InferenceModel>
-bool ClassifierTestCaseProvider<TDatabase, InferenceModel>::ProcessCommandLineOptions()
+bool ClassifierTestCaseProvider<TDatabase, InferenceModel>::ProcessCommandLineOptions(
+ const InferenceTestOptions& commonOptions)
{
if (!ValidateDirectory(m_DataDir))
{
@@ -171,7 +172,7 @@ bool ClassifierTestCaseProvider<TDatabase, InferenceModel>::ProcessCommandLineOp
ReadPredictions();
- m_Model = m_ConstructModel(m_ModelCommandLineOptions);
+ m_Model = m_ConstructModel(commonOptions, m_ModelCommandLineOptions);
if (!m_Model)
{
return false;
@@ -336,7 +337,8 @@ int ClassifierInferenceTestMain(int argc,
return make_unique<TestCaseProvider>(constructDatabase,
[&]
- (typename InferenceModel::CommandLineOptions modelOptions)
+ (const InferenceTestOptions &commonOptions,
+ typename InferenceModel::CommandLineOptions modelOptions)
{
if (!ValidateDirectory(modelOptions.m_ModelDir))
{
@@ -358,7 +360,7 @@ int ClassifierInferenceTestMain(int argc,
modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
modelParams.m_EnableFp16TurboMode = modelOptions.m_EnableFp16TurboMode;
- return std::make_unique<InferenceModel>(modelParams);
+ return std::make_unique<InferenceModel>(modelParams, commonOptions.m_EnableProfiling);
});
});
}
diff --git a/tests/MobileNetSsdInferenceTest.hpp b/tests/MobileNetSsdInferenceTest.hpp
index bbbf957dcf..7beedf8a62 100644
--- a/tests/MobileNetSsdInferenceTest.hpp
+++ b/tests/MobileNetSsdInferenceTest.hpp
@@ -156,14 +156,14 @@ public:
Model::AddCommandLineOptions(options, m_ModelCommandLineOptions);
}
- virtual bool ProcessCommandLineOptions() override
+ virtual bool ProcessCommandLineOptions(const InferenceTestOptions &commonOptions) override
{
if (!ValidateDirectory(m_DataDir))
{
return false;
}
- m_Model = m_ConstructModel(m_ModelCommandLineOptions);
+ m_Model = m_ConstructModel(commonOptions, m_ModelCommandLineOptions);
if (!m_Model)
{
return false;
@@ -191,7 +191,8 @@ public:
private:
typename Model::CommandLineOptions m_ModelCommandLineOptions;
- std::function<std::unique_ptr<Model>(typename Model::CommandLineOptions)> m_ConstructModel;
+ std::function<std::unique_ptr<Model>(const InferenceTestOptions &,
+ typename Model::CommandLineOptions)> m_ConstructModel;
std::unique_ptr<Model> m_Model;
std::string m_DataDir;
diff --git a/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp b/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp
index 3328339318..06196475bf 100644
--- a/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp
+++ b/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp
@@ -40,7 +40,8 @@ int main(int argc, char* argv[])
{
return make_unique<MobileNetSsdTestCaseProvider<Model>>(
[&]
- (typename Model::CommandLineOptions modelOptions)
+ (const InferenceTestOptions& commonOptions,
+ typename Model::CommandLineOptions modelOptions)
{
if (!ValidateDirectory(modelOptions.m_ModelDir))
{
@@ -63,7 +64,7 @@ int main(int argc, char* argv[])
modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
modelParams.m_EnableFp16TurboMode = modelOptions.m_EnableFp16TurboMode;
- return std::make_unique<Model>(modelParams);
+ return std::make_unique<Model>(modelParams, commonOptions.m_EnableProfiling);
});
});
}
diff --git a/tests/YoloInferenceTest.hpp b/tests/YoloInferenceTest.hpp
index 922bcb8582..eb6b22767b 100644
--- a/tests/YoloInferenceTest.hpp
+++ b/tests/YoloInferenceTest.hpp
@@ -194,14 +194,14 @@ public:
Model::AddCommandLineOptions(options, m_ModelCommandLineOptions);
}
- virtual bool ProcessCommandLineOptions() override
+ virtual bool ProcessCommandLineOptions(const InferenceTestOptions &commonOptions) override
{
if (!ValidateDirectory(m_DataDir))
{
return false;
}
- m_Model = m_ConstructModel(m_ModelCommandLineOptions);
+ m_Model = m_ConstructModel(commonOptions, m_ModelCommandLineOptions);
if (!m_Model)
{
return false;
@@ -229,7 +229,8 @@ public:
private:
typename Model::CommandLineOptions m_ModelCommandLineOptions;
- std::function<std::unique_ptr<Model>(typename Model::CommandLineOptions)> m_ConstructModel;
+ std::function<std::unique_ptr<Model>(const InferenceTestOptions&,
+ typename Model::CommandLineOptions)> m_ConstructModel;
std::unique_ptr<Model> m_Model;
std::string m_DataDir;